
Commit

Restructure purge feature
Purging a job no longer removes cracking results from the corresponding hash list
Created a new endpoint for purging hash lists, which removes all cracking results from the hash list and kills all corresponding jobs
alpatron committed Feb 26, 2024
1 parent a9e0be4 commit 4c2a46e
Showing 3 changed files with 39 additions and 5 deletions.
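
For illustration, below is a minimal client-side sketch of calling the new purge endpoint added in this commit. Only the relative route /<int:id>/purge and the 200/403 responses come from the diff; the host, port, "hashlist" namespace prefix, authentication method, and the hash list id are assumptions.

import requests

# Hypothetical deployment details: only the route suffix "/<int:id>/purge" and the
# 200/403 status codes are defined in this commit; host, port, and the "hashlist"
# prefix are placeholders.
BASE_URL = "http://localhost:5000/api/hashlist"
HASH_LIST_ID = 42  # hypothetical hash list id

# Assumed: the API uses flask_login session authentication, so the request is sent
# from a session that has already logged in.
session = requests.Session()

response = session.post(f"{BASE_URL}/{HASH_LIST_ID}/purge")
if response.status_code == 200:
    print("Hash list purged: cracked hashes cleared, attached jobs killed.")
elif response.status_code == 403:
    print("Purge refused: the hash list contains jobs this user cannot edit.")
else:
    print(f"Unexpected response: {response.status_code} {response.text}")
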
@@ -159,7 +159,7 @@ def build_hash_query(args:reqparse.ParseResult,hash_list_id:int):
    return hash_query


def acquire_hash_list(id:str,name:Optional[str]) -> FcHashList:
def acquire_hash_list(id:str,name:Optional[str]=None) -> FcHashList:
"""
Common function for all "add hashes to hash list" endpoints. Given an id, returns an FcHashList object.
@@ -8,12 +8,14 @@

from flask import request, send_file
from flask_restx import Resource, abort
from flask_login import current_user

from src.api.apiConfig import api
from src.api.fitcrack.endpoints.hashlists.argumentsParser import make_empty_hash_list_parser, hash_list_parser, hash_list_add_hash_list_parser, hash_list_add_hash_file_parser, hash_list_hashes_parser_paginated, hash_list_hashes_parser, hash_list_add_protected_file_parser
from src.api.fitcrack.endpoints.hashlists.responseModels import empty_hash_list_created_model, page_of_hash_lists_model, hash_addition_result_model, page_of_hashes_model, hash_list_model_long
from src.api.fitcrack.endpoints.hashlists.functions import upload_hash_list, build_hash_query, acquire_hash_list
from src.api.fitcrack.endpoints.protectedFile.functions import addProtectedFile
from src.api.fitcrack.endpoints.job.functions import editable_jobs_ids, kill_job
from src.database import db
from src.database.models import FcHashList, FcHash

@@ -107,6 +109,7 @@ def delete(self, id):
        db.session.commit()
        return None, 204


@ns.route('/<int:id>/details')
class HashListHashes(Resource):
    @api.expect(hash_list_hashes_parser_paginated)
@@ -129,6 +132,7 @@ def get(self,id:int):

        return hash_page


@ns.route('/<int:id>/download')
class exportHashes(Resource):
    @api.expect(hash_list_hashes_parser)
@@ -159,6 +163,7 @@ def get(self, id:int):
        filename = hash_list.name + ".txt"
        return send_file(hash_list_file, attachment_filename=filename, as_attachment=True, mimetype="text/plain")


@ns.route('/<id>/hashes')
class hashListUploadList(Resource):

@@ -230,3 +235,35 @@ def post(self,id:str):
        result = addProtectedFile(args.file)

        return upload_hash_list([result['hash']],hash_list,int(result['hash_type']),'fail_invalid',False)


@ns.route('/<int:id>/purge')
class hashListPurge(Resource):
    @api.response(200, 'Hash list purged.')
    @api.response(403, 'Hash list contains jobs that you do not have rights to; cannot perform purge.')
    def post(self,id:int):
        """
        Removes all cracked hashes from the hash list; it will be as if the hash list was created anew.
        This also kills all jobs that are attached to the hash list.
        This endpoint does check that the user has rights to all jobs that are part of the hash list.
        """
        hash_list = acquire_hash_list(id)
        jobs = hash_list.jobs
        if not current_user.role.EDIT_ALL_JOBS: #Logic taken from job/multiJobOperation endpoint.
            editable = editable_jobs_ids()
            if not {job.id for job in jobs} <= set(editable):
                abort(403, 'Hash list contains jobs that you do not have rights to; cannot perform purge.')

        for job in jobs:
            kill_job(job,db)

        for job_hash in hash_list.hashes:
            job_hash.result = None
            job_hash.time_cracked = None

        try:
            db.session.commit()
        except:
            return 'Something went wrong.', 500

        return 'Hash list purged', 200
@@ -33,7 +33,7 @@
def stop_job(job):
    job.status = status_to_code['finishing']

def kill_job(job, db):
def kill_job(job:FcJob, db):
    id = job.id
    # Job is stopped in Generator after sending BOINC commands
    if (int(job.status) != status_to_code['running']) and (int(job.status) != status_to_code['finishing']):
@@ -70,9 +70,6 @@ def kill_job(job, db):
    for item in graphData:
        db.session.delete(item)

    for job_hash in job.hash_list.hashes:
        job_hash.result = None
        job_hash.time_cracked = None

def start_job(job, db):
    hosts = [ a[0] for a in db.session.query(Host.id).all() ]
