From 1d6bd417d93bdaf24811cb0bcae0eae18b18955b Mon Sep 17 00:00:00 2001 From: dx-tan Date: Tue, 5 Mar 2024 10:22:15 +0700 Subject: [PATCH 1/3] Update: #61 on prod environment --- docker-compose-prod.yml | 5 +---- scripts/crawl_database_by_time.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/docker-compose-prod.yml b/docker-compose-prod.yml index a482a89..f3d56b5 100644 --- a/docker-compose-prod.yml +++ b/docker-compose-prod.yml @@ -196,10 +196,7 @@ services: - ctel-sbt dashboard_refresh: - build: - context: api-cronjob - dockerfile: Dockerfile - image: sidp/api-caller-sbt:{{tag}} + image: public.ecr.aws/v4n9y6r8/sidp/api-caller-sbt:{{tag}} environment: - PROXY=http://be-ctel-sbt:9000 - ADMIN_USER_NAME=${ADMIN_USER_NAME} diff --git a/scripts/crawl_database_by_time.py b/scripts/crawl_database_by_time.py index 449e3f6..4628592 100644 --- a/scripts/crawl_database_by_time.py +++ b/scripts/crawl_database_by_time.py @@ -4,15 +4,20 @@ import boto3 import os from tqdm import tqdm from datetime import datetime, timedelta -from pytz import timezone +import pytz +from django.utils import timezone from dotenv import load_dotenv load_dotenv("../.env_prod") -OUTPUT_NAME = "all_0226_0304" -START_DATE = datetime(2024, 2, 26, tzinfo=timezone('Asia/Ho_Chi_Minh')) -END_DATE = datetime(2024, 3, 4, tzinfo=timezone('Asia/Ho_Chi_Minh')) +tz = pytz.timezone('Asia/Singapore') + +OUTPUT_NAME = "Feb29" +START_DATE = datetime(2024, 2, 29) +END_DATE = datetime(2024, 3, 1) +START_DATE = timezone.make_aware(START_DATE, tz) +END_DATE = timezone.make_aware(END_DATE, tz) # Database connection details db_host = os.environ.get('DB_HOST', "") From 51623c3fa15c7952bd3d8178e31c9319e58fdad9 Mon Sep 17 00:00:00 2001 From: dx-tan Date: Tue, 5 Mar 2024 15:39:51 +0700 Subject: [PATCH 2/3] Fix #54 --- cope2n-api/fwd_api/api/accuracy_view.py | 7 ++++--- .../celery_worker/process_report_tasks.py | 4 +++- .../management/commands/migrate-csv.py | 4 ++-- cope2n-api/fwd_api/migrations/0184_caching.py | 21 +++++++++++++++++++ cope2n-api/fwd_api/models/Caching.py | 8 +++++++ cope2n-api/fwd_api/models/__init__.py | 3 +-- cope2n-api/fwd_api/utils/cache.py | 19 +++++++++++++++++ docker-compose-dev.yml | 17 +++++++-------- 8 files changed, 66 insertions(+), 17 deletions(-) create mode 100644 cope2n-api/fwd_api/migrations/0184_caching.py create mode 100644 cope2n-api/fwd_api/models/Caching.py create mode 100644 cope2n-api/fwd_api/utils/cache.py diff --git a/cope2n-api/fwd_api/api/accuracy_view.py b/cope2n-api/fwd_api/api/accuracy_view.py index 03659f9..9d14f51 100644 --- a/cope2n-api/fwd_api/api/accuracy_view.py +++ b/cope2n-api/fwd_api/api/accuracy_view.py @@ -19,6 +19,7 @@ from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list from ..utils.file import download_from_S3, dict2xlsx, save_report_to_S3, build_S3_url from ..utils.redis import RedisUtils from ..utils.process import string_to_boolean +from ..utils.cache import get_cache, set_cache from ..request.ReportCreationSerializer import ReportCreationSerializer from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long from ..utils.report import aggregate_overview @@ -467,7 +468,7 @@ class AccuracyViewSet(viewsets.ViewSet): # Retrive data from Redis key = f"{subsidiary}_{duration}" - data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", []) + data = get_cache(key).get("data", []) response = { 'overview_data': data, } @@ -510,7 +511,7 @@ class 
AccuracyViewSet(viewsets.ViewSet): for sub in subsidiaries_to_include: key = f"{sub}_{duration}" try: - this_overview = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", []) + this_overview = get_cache(key).get("data", []) if sub != "seao": this_overview = [d for d in this_overview if d.get("subs") != "+"] else: @@ -551,7 +552,7 @@ class AccuracyViewSet(viewsets.ViewSet): else: # Retrive data from Redis key = f"{subsidiary}_{duration}" - data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", []) + data = get_cache(key).get("data", []) response = { 'overview_data': data, } diff --git a/cope2n-api/fwd_api/celery_worker/process_report_tasks.py b/cope2n-api/fwd_api/celery_worker/process_report_tasks.py index 7e483da..aeab9b1 100644 --- a/cope2n-api/fwd_api/celery_worker/process_report_tasks.py +++ b/cope2n-api/fwd_api/celery_worker/process_report_tasks.py @@ -7,6 +7,7 @@ from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_s from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3, save_images_to_csv_briefly from ..utils import time_stuff from ..utils.redis import RedisUtils +from ..utils.cache import set_cache, get_cache from django.utils import timezone from django.db.models import Q import json @@ -315,7 +316,8 @@ def make_a_report_2(report_id, query_set): overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx" local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT) s3_key=save_report_to_S3(report.report_id, local_workbook) - redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data)) + # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data)) + set_cache(overview_filename.replace(".xlsx", ""), save_data) except IndexError as e: print(e) diff --git a/cope2n-api/fwd_api/management/commands/migrate-csv.py b/cope2n-api/fwd_api/management/commands/migrate-csv.py index f3c8ebd..b4ad435 100644 --- a/cope2n-api/fwd_api/management/commands/migrate-csv.py +++ b/cope2n-api/fwd_api/management/commands/migrate-csv.py @@ -1,5 +1,5 @@ # myapp/management/commands/mycustomcommand.py -from io import StringIO +from fwd_api.constant.common import FileCategory from django.core.management.base import BaseCommand from tqdm import tqdm from fwd_api.models import SubscriptionRequestFile, SubscriptionRequest @@ -37,7 +37,7 @@ class Command(BaseCommand): "sold_to_party": "" } - images = SubscriptionRequestFile.objects.filter(request=request) + images = SubscriptionRequestFile.objects.filter(request=request, file_category=FileCategory.Origin.value) is_match = False try: for i, image in enumerate(images): diff --git a/cope2n-api/fwd_api/migrations/0184_caching.py b/cope2n-api/fwd_api/migrations/0184_caching.py new file mode 100644 index 0000000..3716493 --- /dev/null +++ b/cope2n-api/fwd_api/migrations/0184_caching.py @@ -0,0 +1,21 @@ +# Generated by Django 4.1.3 on 2024-03-05 08:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('fwd_api', '0183_report_num_no_reviewed_report_num_not_reviewed_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='Caching', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('key', models.CharField(max_length=200, unique=True)), + ('value', 
models.JSONField(null=True)), + ], + ), + ] diff --git a/cope2n-api/fwd_api/models/Caching.py b/cope2n-api/fwd_api/models/Caching.py new file mode 100644 index 0000000..a7ad5f8 --- /dev/null +++ b/cope2n-api/fwd_api/models/Caching.py @@ -0,0 +1,8 @@ +from django.db import models +from django.utils import timezone +from fwd_api.models.Subscription import Subscription + +class Caching(models.Model): + id = models.AutoField(primary_key=True) + key = models.CharField(max_length=200, unique=True) # Change to request_id + value = models.JSONField(null=True) # Change to request_id diff --git a/cope2n-api/fwd_api/models/__init__.py b/cope2n-api/fwd_api/models/__init__.py index 47f23f0..be7fa22 100755 --- a/cope2n-api/fwd_api/models/__init__.py +++ b/cope2n-api/fwd_api/models/__init__.py @@ -8,5 +8,4 @@ from .Subscription import Subscription from .FeedbackRequest import FeedbackRequest from .Report import Report from .ReportFile import ReportFile - - +from .Caching import Caching \ No newline at end of file diff --git a/cope2n-api/fwd_api/utils/cache.py b/cope2n-api/fwd_api/utils/cache.py new file mode 100644 index 0000000..ea68137 --- /dev/null +++ b/cope2n-api/fwd_api/utils/cache.py @@ -0,0 +1,19 @@ +from fwd_api.models import Caching + +def set_cache(key, value): + cache = Caching.objects.filter(key=key) + if cache.count() == 0: + this_cache = Caching(key=key, value=value) + else: + this_cache = cache.first() + this_cache.value = value + this_cache.save() + return this_cache + + +def get_cache(key): + value = {} + cache = Caching.objects.filter(key=key) + if cache.count() > 0: + value = cache.first().value + return value \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 57d8a25..3426a9d 100755 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -89,12 +89,12 @@ services: depends_on: db-sbt: condition: service_started - command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input && - python manage.py makemigrations && - python manage.py migrate && - python manage.py compilemessages && - gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod - # command: bash -c "tail -f > /dev/null" + # command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input && + # python manage.py makemigrations && + # python manage.py migrate && + # python manage.py compilemessages && + # gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod + command: bash -c "tail -f > /dev/null" minio: image: minio/minio @@ -179,8 +179,8 @@ services: - ./cope2n-api:/app working_dir: /app - command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5" - # command: bash -c "tail -f > /dev/null" + # command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5" + command: bash -c "tail -f > /dev/null" # Back-end persistent db-sbt: @@ -250,7 +250,6 @@ services: networks: - ctel-sbt - volumes: db_data: BE_static: From 3581c95a8ed349a6f09b96a914d42901a3b7637c Mon Sep 17 00:00:00 2001 From: dx-tan Date: Tue, 5 Mar 2024 16:15:59 +0700 Subject: [PATCH 3/3] Fix: #64 --- cope2n-fe/src/utils/time.ts | 4 ++-- docker-compose-dev.yml | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/cope2n-fe/src/utils/time.ts b/cope2n-fe/src/utils/time.ts index 51bc88c..870ef53 100644 --- a/cope2n-fe/src/utils/time.ts +++ b/cope2n-fe/src/utils/time.ts @@ -4,6 +4,6 @@ 
export function datetimeStrToDate(dateTimeStr: string, targetTimeZone: string): } const options: Intl.DateTimeFormatOptions = { timeZone: targetTimeZone, year: 'numeric', month: '2-digit', day: '2-digit' }; const date = new Date(dateTimeStr); - const convertedDateTimeStr = date.toLocaleDateString('en-US', options).split('/').reverse().join('-'); - return convertedDateTimeStr; + const convertedDateTimeStr = date.toLocaleDateString('en-US', options).split('/').reverse(); + return convertedDateTimeStr[0] + "-" + convertedDateTimeStr[2] + "-" + convertedDateTimeStr[1]; } \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 3426a9d..ffb2d60 100755 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -89,12 +89,12 @@ services: depends_on: db-sbt: condition: service_started - # command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input && - # python manage.py makemigrations && - # python manage.py migrate && - # python manage.py compilemessages && - # gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod - command: bash -c "tail -f > /dev/null" + command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input && + python manage.py makemigrations && + python manage.py migrate && + python manage.py compilemessages && + gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod + # command: bash -c "tail -f > /dev/null" minio: image: minio/minio @@ -179,8 +179,8 @@ services: - ./cope2n-api:/app working_dir: /app - # command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5" - command: bash -c "tail -f > /dev/null" + command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5" + # command: bash -c "tail -f > /dev/null" # Back-end persistent db-sbt:
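For reviewers, a minimal sketch of how the DB-backed cache helpers added in PATCH 2/3 are meant to be used, based only on the get_cache/set_cache definitions and the "<subsidiary>_<duration>" key format visible in the diff; the concrete key and payload values below are illustrative, and the snippet assumes it runs inside the project's Django shell (python manage.py shell) so the fwd_api app is importable:

    from fwd_api.utils.cache import get_cache, set_cache

    # Cache an overview payload under the "<subsidiary>_<duration>" key format
    # used by make_a_report_2 (subsidiary/duration and the row contents here
    # are made-up example values).
    set_cache("seao_30d", {"data": [{"subs": "+", "num_request": 42}]})

    # The accuracy views read it back; get_cache returns {} for a missing key,
    # so the .get("data", []) call sites in accuracy_view.py never raise.
    overview_rows = get_cache("seao_30d").get("data", [])
    print(overview_rows)

Compared with the earlier redis_client.set_cache / get_specific_cache calls, the payload is stored in the Caching model's JSONField, so the json.dumps/json.loads round-trip is no longer needed at the call sites.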