diff --git a/.gitignore b/.gitignore
index 62673f27..b8c6c94f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -138,7 +138,6 @@ db.sqlite
 db.sqlite-journal
 
 #kern temp files
-user_activity_backup.tmp
 project_export*
 tmp/*
 !tmp/.gitkeep
diff --git a/alembic/versions/2c0029684bd7_remove_user_activity.py b/alembic/versions/2c0029684bd7_remove_user_activity.py
new file mode 100644
index 00000000..63f42bd5
--- /dev/null
+++ b/alembic/versions/2c0029684bd7_remove_user_activity.py
@@ -0,0 +1,38 @@
+"""remove user activity
+
+Revision ID: 2c0029684bd7
+Revises: 7aa933ec5de9
+Create Date: 2024-11-25 14:48:25.980269
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '2c0029684bd7'
+down_revision = '7aa933ec5de9'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index('ix_user_activity_created_by', table_name='user_activity')
+    op.drop_table('user_activity')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('user_activity',
+    sa.Column('id', postgresql.UUID(), autoincrement=False, nullable=False),
+    sa.Column('created_by', postgresql.UUID(), autoincrement=False, nullable=True),
+    sa.Column('activity', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
+    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+    sa.Column('from_backup', sa.BOOLEAN(), autoincrement=False, nullable=True),
+    sa.ForeignKeyConstraint(['created_by'], ['user.id'], name='user_activity_created_by_fkey', ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id', name='user_activity_pkey')
+    )
+    op.create_index('ix_user_activity_created_by', 'user_activity', ['created_by'], unique=False)
+    # ### end Alembic commands ###
diff --git a/controller/user/manager.py b/controller/user/manager.py
index 9ccf8f05..64e0479b 100644
--- a/controller/user/manager.py
+++ b/controller/user/manager.py
@@ -1,6 +1,6 @@
-from typing import Any, Dict, List, Optional
+from typing import Dict, Optional
 from submodules.model import User, daemon, enums
-from submodules.model.business_objects import user, user_activity, general
+from submodules.model.business_objects import user, general
 from controller.auth import kratos
 from submodules.model.exceptions import EntityNotFoundException
 from controller.organization import manager as organization_manager
@@ -107,12 +107,11 @@ def get_active_users_filtered(
 
 @param_throttle(seconds=10)
 def update_last_interaction(user_id: str) -> None:
-    user_activity.update_last_interaction(user_id)
+    user.update_last_interaction(user_id)
 
 
 def delete_user(user_id: str) -> None:
     user.delete(user_id, with_commit=True)
-    user_activity.delete_user_activity(user_id, with_commit=True)
     kratos.__refresh_identity_cache()
 
 
diff --git a/fast_api/routes/misc.py b/fast_api/routes/misc.py
index 54adf984..b9850ff3 100644
--- a/fast_api/routes/misc.py
+++ b/fast_api/routes/misc.py
@@ -1,4 +1,3 @@
-import json
 from fastapi import APIRouter, Body, Request, status
 from fastapi.responses import PlainTextResponse
 from fast_api.models import (
@@ -17,7 +16,6 @@
 from controller.task_master import manager as task_master_manager
 from submodules.model import enums
 from submodules.model.global_objects import customer_button as customer_button_db_go
-import util.user_activity
 from submodules.model.util import sql_alchemy_to_dict
 from submodules.model.enums import (
     try_parse_enum_value,
@@ -164,33 +162,6 @@ def get_task_queue_pause(request: Request):
     return pack_json_result({"taskQueuePause": task_queue_pause})
 
 
-@router.get("/all-users-activity")
-def get_all_users_activity(request: Request):
-    auth.check_admin_access(request.state.info)
-    data = util.user_activity.resolve_all_users_activity()
-
-    activity = []
-
-    for user in data:
-        user_activity = []
-        if "user_activity" in user and user["user_activity"] is not None:
-            for activity_item in user["user_activity"]:
-                user_activity.append(json.dumps(activity_item))
-
-        activity.append(
-            {
-                "user": {
-                    "id": str(user["user_id"]),
-                },
-                "userActivity": user_activity,
-                "warning": user["warning"],
-                "warningText": user["warning_text"],
-            }
-        )
-
-    return pack_json_result({"data": {"allUsersActivity": activity}})
-
-
 # this endpoint is meant to be used by the frontend to get the customer buttons for the current user
 # location is a filter to prevent the frontend from having to filter the buttons itself
 # also doesn't convert the key!
diff --git a/submodules/model b/submodules/model
index 12f62019..bf76a4da 160000
--- a/submodules/model
+++ b/submodules/model
@@ -1 +1 @@
-Subproject commit 12f62019a262ce831552614c0cc15d9a230b4b41
+Subproject commit bf76a4daec06a56b267b958df6da6b6e49564ed2
diff --git a/util/user_activity.py b/util/user_activity.py
deleted file mode 100644
index c85f526b..00000000
--- a/util/user_activity.py
+++ /dev/null
@@ -1,83 +0,0 @@
-import time
-from typing import Dict, Union, Any, List
-from submodules.model.business_objects import user_activity, general
-from submodules.model import daemon
-import os
-from datetime import datetime
-import json
-
-BACKUP_FILE_PATH = "user_activity_backup.tmp"
-__thread_running = False
-
-
-def add_user_activity_entry(
-    user_id: str, activity: Any, caller_dict: Dict[str, Union[str, int]]
-) -> None:
-    if isinstance(activity, str):
-        activity = {**caller_dict, "activity": activity}
-    else:
-        activity.update(
-            caller_dict,
-        )
-
-    global __thread_running
-    if not __thread_running:
-        __thread_running = True
-        daemon.run_without_db_token(__start_thread_db_write)
-
-    activity_set = [user_id, activity, datetime.now(), False]
-    __write_backup_file(activity_set)
-
-
-def __write_backup_file(content: Any) -> None:
-    file = open(BACKUP_FILE_PATH, "a+")
-    to_write = [content[0], content[1], "%s" % (content[2]), True]
-    to_write = json.dumps(to_write)
-    file.write(to_write + "\n")
-    file.close()
-
-
-def __read_backup_file() -> List[Any]:
-    if not os.path.exists(BACKUP_FILE_PATH):
-        return None
-    file = open(BACKUP_FILE_PATH, "r")
-    content = file.read()
-    content = content.split("\n")
-    content = [json.loads(entry) for entry in content if len(entry) > 0]
-    for c in content:
-        c[2] = datetime.strptime(c[2], "%Y-%m-%d %H:%M:%S.%f")
-    return content
-
-
-def resolve_all_users_activity():
-    return_values = [
-        {
-            "user_id": entry.user_id,
-            "user_activity": entry.activity_feed,
-            "warning": entry.has_warning,
-            "warning_text": entry.warning_text,
-        }
-        for entry in user_activity.get_all_user_activity()
-    ]
-
-    return return_values
-
-
-def __start_thread_db_write() -> None:
-    time.sleep(300)  # only write every 5 min to db to prevent overuse
-    ctx_token = general.get_ctx_token()
-
-    if not os.path.exists(BACKUP_FILE_PATH):
-        # for multi container environment
-        return
-    entries_to_add = __read_backup_file()
-    if entries_to_add is None:
-        return
-
-    user_activity.write_user_activity_safe(entries_to_add)
-    # further cleanup
-    if os.path.exists(BACKUP_FILE_PATH):
-        os.remove(BACKUP_FILE_PATH)
-    global __thread_running
-    __thread_running = False
-    general.remove_and_refresh_session(ctx_token)