From 1cee58d712bec925d559968bb89ec4394de2f04c Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 23 Feb 2024 18:28:33 +0100 Subject: [PATCH 01/20] CDE multithreaded (draft), optimisations --- src/core.py | 304 +++++++++++++++++++++++++++++++++++++------------- src/librer.py | 39 +++++-- src/record.py | 6 +- 3 files changed, 263 insertions(+), 86 deletions(-) diff --git a/src/core.py b/src/core.py index c2487ae..d64a51e 100644 --- a/src/core.py +++ b/src/core.py @@ -78,6 +78,11 @@ SEARCH_DAT_FILE = 'searchinfo' SIGNAL_FILE = 'signal' +CD_OK_ID = 0 +CD_INDEX_ID = 1 +CD_DATA_ID = 2 + + def get_dev_labes_dict(): lsblk = subprocess_run(['lsblk','-fJ'],capture_output = True,text = True) lsblk.dict = json_loads(lsblk.stdout) @@ -597,11 +602,11 @@ def prepare_customdata_pool_rec(self,print_func,abort_list,scan_like_data,parent #print('prepare_customdata_pool_rec',e,entry_name,size,is_dir,is_file,is_symlink,is_bind,has_files,mtime) print_func( ('error','prepare_customdata_pool_rec:{e},{entry_name},{size},{is_dir},{is_file},{is_symlink},{is_bind},{has_files},{mtime}') ) - def extract_customdata(self,print_func,abort_list): + def extract_customdata(self,print_func,abort_list,threads_quant=0): self_header = self.header scan_path = self_header.scan_path - print_func( ('info','custom data extraction ...'),True) + print_func( ('info',f'custom data extraction {threads_quant=}...'),True) self_header.files_cde_quant = 0 self_header.files_cde_size = 0 @@ -614,33 +619,63 @@ def extract_customdata(self,print_func,abort_list): print_func( ('cdeinit',files_cde_quant_sum,files_cde_size_sum),True) - customdata_helper={} - customdata_stats_size=defaultdict(int) customdata_stats_uniq=defaultdict(int) customdata_stats_refs=defaultdict(int) customdata_stats_time=defaultdict(float) customdata_stats_time_all=[0] + + if threads_quant==0: + threads_quant = cpu_count() + + customdata_pool_per_thread = defaultdict(list) + timeout_semi_list_per_thread = { thread_index:[None] for thread_index in range(threads_quant) } + self.killed = { thread_index:False for thread_index in range(threads_quant) } + + #per_thread_customdata_dict={} + thread_index = 0 + for val_tuple in self.customdata_pool.values(): + customdata_pool_per_thread[thread_index].append(val_tuple) + thread_index+=1 + thread_index %= threads_quant + #per_thread_customdata_dict[thread_index]={} + + #print(f'{thread_index=}') + + CD_OK_ID_loc = CD_OK_ID + CD_DATA_ID_loc = CD_DATA_ID + + all_threads_data_list={} + #files_cde_errors_quant={} + all_threads_files_cde_errors_quant = {} + + for thread_index in range(threads_quant): + all_threads_data_list[thread_index]=[0,0,0,0] + #files_cde_errors_quant[thread_index]=defaultdict(int) + all_threads_files_cde_errors_quant[thread_index]=defaultdict(int) + + time_start_all = perf_counter() + ############################################################# - def threaded_cde(timeout_semi_list): - cd_index=0 - self_customdata_append = self.customdata.append + def threaded_cde(timeout_semi_list,thread_index,thread_data_list,files_cde_errors_quant): - time_start_all = perf_counter() + #curr_per_thread_customdata_dict = per_thread_customdata_dict[thread_index] + #cd_index_per_thread = 0 aborted_string = 'Custom data extraction was aborted.' 
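# Illustrative sketch (not taken from this patch) of the pattern introduced above:
# the customdata pool is split round-robin across worker threads, each worker gets
# its own item list and its own data/error slots. All names below (pool_items,
# run_rule, split_round_robin, run_pool) are hypothetical.
from collections import defaultdict
from threading import Thread

def split_round_robin(pool_items, threads):
    per_thread = defaultdict(list)
    for index, item in enumerate(pool_items):
        per_thread[index % threads].append(item)   # thread i gets items i, i+threads, ...
    return per_thread

def run_pool(pool_items, threads, run_rule):
    per_thread = split_round_robin(pool_items, threads)
    workers = [Thread(target=run_rule, args=(per_thread[i], i), daemon=True)
               for i in range(threads)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()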
- files_cde_errors_quant = defaultdict(int) + #files_cde_errors_quant = defaultdict(int) files_cde_quant = 0 files_cde_size = 0 files_cde_size_extracted = 0 files_cde_errors_quant_all = 0 - for (scan_like_list,subpath,rule_nr,size) in self.customdata_pool.values(): + #for (scan_like_list,subpath,rule_nr,size) in self.customdata_pool.values(): + for (scan_like_list,subpath,rule_nr,size) in customdata_pool_per_thread[thread_index]: - self.killed=False + self.killed[thread_index]=False time_start = perf_counter() if abort_list[0] : #wszystko @@ -656,7 +691,7 @@ def threaded_cde(timeout_semi_list): command,command_info = get_command(executable,parameters,full_file_path,shell) #print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_quant_sum,files_cde_size,files_cde_size_sum) ) - print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_size) ) + #print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_size) ) timeout_val=time()+timeout if timeout else None ##################################### @@ -685,7 +720,7 @@ def threaded_cde(timeout_semi_list): timeout_semi_list[0] = None break - if self.killed: + if self.killed[thread_index]: output_list_append('Killed.') output = '\n'.join(output_list).strip() @@ -698,7 +733,7 @@ def threaded_cde(timeout_semi_list): time_end = perf_counter() customdata_stats_time[rule_nr]+=time_end-time_start - if returncode or self.killed or aborted: + if returncode or self.killed[thread_index] or aborted: files_cde_errors_quant[rule_nr]+=1 files_cde_errors_quant_all+=1 @@ -707,13 +742,112 @@ def threaded_cde(timeout_semi_list): files_cde_size += size files_cde_size_extracted += asizeof(output) - new_elem={} - new_elem['cd_ok']= bool(returncode==0 and not self.killed and not aborted) + thread_data_list[0]=files_cde_size_extracted + thread_data_list[1]=files_cde_errors_quant_all + thread_data_list[2]=files_cde_quant + thread_data_list[3]=files_cde_size + + new_elem={ + CD_OK_ID_loc:bool(returncode==0 and not self.killed[thread_index] and not aborted), + CD_DATA_ID_loc:(rule_nr,returncode,output) + } + + scan_like_list.append(new_elem) #dostep z wielu watkow + + sys.exit() #thread + + #timeout_semi_list = [None] + + cde_threads = {} + cde_thread_is_alive = {} + any_thread_alive = True + + for thread_index in range(threads_quant): + cde_threads[thread_index] = cde_thread = Thread(target = lambda : threaded_cde(timeout_semi_list_per_thread[thread_index],thread_index,all_threads_data_list[thread_index],all_threads_files_cde_errors_quant[thread_index]),daemon=True) + cde_thread.start() + + #rules + files_cde_errors_quant = defaultdict(int) + + while any_thread_alive: + any_thread_alive = False + now = time() + for thread_index in range(threads_quant): + #cde_thread = cde_threads[thread_index] + #cde_threads[thread_index] = cde_thread = Thread(target = lambda : threaded_cde(timeout_semi_list_per_thread[thread_index]),daemon=True) + #cde_thread.start() + #cde_thread_is_alive[thread_index] = cde_thread.is_alive + + if cde_threads[thread_index].is_alive(): + any_thread_alive = True + if timeout_semi_list_per_thread[thread_index][0]: + timeout_val,subprocess = timeout_semi_list_per_thread[thread_index][0] + if any(abort_list) or (timeout_val and now>timeout_val): + kill_subprocess(subprocess,print_func) + 
self.killed[thread_index]=True + abort_list[1]=False + #sleep(0.2) + #else: + #sleep(0.4) + + #print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_size) ) + + files_cde_size_extracted=0 + files_cde_errors_quant_all=0 + files_cde_quant=0 + files_cde_size=0 + + for thread_index in range(threads_quant): + thread_data_list = all_threads_data_list[thread_index] + + files_cde_size_extracted+=thread_data_list[0] + files_cde_errors_quant_all+=thread_data_list[1] + files_cde_quant+=thread_data_list[2] + files_cde_size+=thread_data_list[3] + + for rule_nr,val in all_threads_files_cde_errors_quant[thread_index].items(): + files_cde_errors_quant[rule_nr] += val + + + print_func( ('cde',f'(multithread run)',0,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_size) ) + + sleep(0.4) + + + + time_end_all = perf_counter() + + self_header.files_cde_errors_quant=files_cde_errors_quant + self_header.files_cde_errors_quant_all = files_cde_errors_quant_all - cd_field=(rule_nr,returncode,output) - if cd_field not in customdata_helper: - customdata_helper[cd_field]=cd_index - new_elem['cd_index']=cd_index + self_header.files_cde_quant = files_cde_quant + self_header.files_cde_size = files_cde_size + self_header.files_cde_size_extracted = files_cde_size_extracted + + customdata_stats_time_all[0]=time_end_all-time_start_all + + + + + + print_func( ('info','custom data extraction finished.'),True) + + customdata_helper={} + cd_index=0 + self_customdata_append = self.customdata.append + + CD_INDEX_ID_loc = CD_INDEX_ID + for thread_index in range(threads_quant): + for (scan_like_list,subpath,rule_nr,size) in customdata_pool_per_thread[thread_index]: + new_elem = scan_like_list[-1] + cd_field = new_elem[CD_DATA_ID_loc] + + try: + used_cd_index = customdata_helper[cd_field] + new_elem[CD_INDEX_ID_loc]=used_cd_index + customdata_stats_refs[rule_nr]+=1 + except: + customdata_helper[cd_field] = new_elem[CD_INDEX_ID_loc] = cd_index cd_index+=1 self_customdata_append(cd_field) @@ -721,46 +855,32 @@ def threaded_cde(timeout_semi_list): customdata_stats_size[rule_nr]+=asizeof(cd_field) customdata_stats_uniq[rule_nr]+=1 customdata_stats_refs[rule_nr]+=1 - else: - new_elem['cd_index']=customdata_helper[cd_field] - customdata_stats_refs[rule_nr]+=1 - #if do_crc: - # new_elem['crc_val']=crc_val - scan_like_list.append(new_elem) - time_end_all = perf_counter() + #if cd_field not in customdata_helper: + # customdata_helper[cd_field]=cd_index + # new_elem[CD_INDEX_ID_loc] = cd_index + # new_elem['cd_index']=cd_index + # cd_index+=1 - self_header.files_cde_errors_quant=files_cde_errors_quant - self_header.files_cde_errors_quant_all = files_cde_errors_quant_all + # self_customdata_append(cd_field) - self_header.files_cde_quant = files_cde_quant - self_header.files_cde_size = files_cde_size - self_header.files_cde_size_extracted = files_cde_size_extracted + # customdata_stats_size[rule_nr]+=asizeof(cd_field) + # customdata_stats_uniq[rule_nr]+=1 + # customdata_stats_refs[rule_nr]+=1 + #else: + # new_elem['cd_index']=customdata_helper[cd_field] + # new_elem[CD_INDEX_ID_loc]=customdata_helper[cd_field] + # customdata_stats_refs[rule_nr]+=1 - customdata_stats_time_all[0]=time_end_all-time_start_all - sys.exit() #thread - timeout_semi_list = [None] + print_func( ('info','custom data post-processing finished.'),True) - cde_thread = Thread(target = lambda : threaded_cde(timeout_semi_list),daemon=True) - cde_thread.start() - 
cde_thread_is_alive = cde_thread.is_alive + for thread_index in range(threads_quant): + cde_threads[thread_index].join() - while cde_thread_is_alive(): - if timeout_semi_list[0]: - timeout_val,subprocess = timeout_semi_list[0] - if any(abort_list) or (timeout_val and time()>timeout_val): - kill_subprocess(subprocess,print_func) - self.killed=True - abort_list[1]=False - sleep(0.2) - else: - sleep(0.4) - - print_func( ('info','custom data extraction finished.'),True) - - cde_thread.join() + #print(f'{customdata_helper=}') + #print(f'{self.customdata=}') del self.customdata_pool del customdata_helper @@ -834,19 +954,21 @@ def tupelize_rec(self,scan_like_data,results_queue_put): cd_ok = False has_crc = False else: - if 'cd_ok' in info_dict: - cd_ok = info_dict['cd_ok'] - cd_index = info_dict['cd_index'] + #if 'cd_ok' in info_dict: + if CD_OK_ID in info_dict: + cd_ok = info_dict[CD_OK_ID] + cd_index = info_dict[CD_INDEX_ID] has_cd = True else: cd_ok = False has_cd = False - if 'crc_val' in info_dict: - crc_val = info_dict['crc_val'] - has_crc = True - else: - has_crc = False + #if 'crc_val' in info_dict: + # crc_val = info_dict['crc_val'] + # has_crc = True + #else: + # has_crc = False + has_crc = False code_new = LUT_encode_loc[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc,False,False) ] @@ -961,6 +1083,17 @@ def find_items(self, self_customdata = self.customdata + name_func_to_call_bool = bool(name_func_to_call) + cd_func_to_call_bool = bool(cd_func_to_call) + + size_min_bool = bool(size_min) + size_max_bool = bool(size_max) + timestamp_min_bool = bool(timestamp_min) + timestamp_max_bool = bool(timestamp_max) + + name_func_to_call_res_cache = {} + cd_func_to_call_res_cache = {} + while search_list: filestructure,parent_path_components = search_list_pop() @@ -995,8 +1128,17 @@ def find_items(self, if is_dir : if when_folder_may_apply: #katalog moze spelniac kryteria naazwy pliku ale nie ma rozmiaru i custom data - if name_func_to_call: - if name_func_to_call(name): + if name_func_to_call_bool: + try: + name_func_to_call_res = name_func_to_call_res_cache[name_nr] + except: + try: + name_func_to_call_res = name_func_to_call_res_cache[name_nr] = name_func_to_call(name) + except Exception as e: + print_info_fn(f'find_items(1a):{e}' ) + continue + + if name_func_to_call_res: print_func( (search_progress,size,mtime,*next_level) ) if sub_data: @@ -1008,26 +1150,31 @@ def find_items(self, if size<0: continue - if size_min: + if size_min_bool: if sizesize_max: continue if use_timestamp: - if timestamp_min: + if timestamp_min_bool: if mtimetimestamp_max: continue if name_func_to_call: try: - if not name_func_to_call(name): + name_func_to_call_res = name_func_to_call_res_cache[name_nr] + except: + try: + name_func_to_call_res = name_func_to_call_res_cache[name_nr] = name_func_to_call(name) + except Exception as e: + print_info_fn(f'find_items(1b):{e}' ) continue - except Exception as e: - print_info_fn(f'find_items(1):{e}' ) + + if not name_func_to_call_res: continue #oczywistosc @@ -1043,12 +1190,17 @@ def find_items(self, else: continue - if cd_func_to_call: + if cd_func_to_call_bool: try: - if not cd_func_to_call(cd_data): + cd_func_to_call_res = cd_func_to_call_res_cache[cd_nr] + except: + try: + cd_func_to_call_res = cd_func_to_call_res_cache[cd_nr] = cd_func_to_call(cd_data) + except Exception as e: + print_info_fn(f'find_items(2):{e}' ) continue - except Exception as e: - print_info_fn(f'find_items(2):{e}' ) + + if not cd_func_to_call_res: continue else: @@ -1794,8 +1946,8 @@ 
def wii_data_to_scan_like_data(self,path_list,curr_dict_ref,scan_like_data,custo if cd: new_elem={} - new_elem['cd_index']=cd_index - new_elem['cd_ok']=True + new_elem[CD_INDEX_ID]=cd_index + new_elem[CD_OK_ID]=True temp_list_ref.append(new_elem) @@ -2183,6 +2335,8 @@ def create_new_record(self,temp_dir,update_callback,group=None): self.stdout_files_cde_size_sum=0 def threaded_run(command,results_semi_list,info_semi_list,processes_semi_list): + command_str = ' '.join(command) + print(f'create_new_record - threaded_run {command_str=}') try: subprocess = uni_popen(command,stdin=PIPE) except Exception as re: diff --git a/src/librer.py b/src/librer.py index 70b74d2..a8449f8 100644 --- a/src/librer.py +++ b/src/librer.py @@ -26,7 +26,7 @@ # #################################################################################### -from os import sep,system,getcwd,name as os_name +from os import sep,system,getcwd,name as os_name,cpu_count from os.path import abspath,normpath,dirname,join as path_join,isfile as path_isfile from gc import disable as gc_disable, enable as gc_enable,collect as gc_collect @@ -177,9 +177,9 @@ def read(self): line_list2 = ['0','*.txt,*.nfo','1','256kB','more %','','1','5','0'] line_list3 = ['0','*.pls,*.m3u,*.cue','','','more %','','1','5','0'] line_list4 = ['0','*.mp3,*.mp4,*.mpeg,*.mkv','','','ffprobe.exe -hide_banner %','','1','5','0'] - line_list4a = ['0','*.mp3,*.mp4,*.mpeg,*.mkv','','','MediaInfo.exe','%','0','5','0'] + line_list4a = ['0','*.mp3,*.mp4,*.mpeg,*.mkv','','','MediaInfo.exe','%','0','5','0'] line_list5 = ['0','*.jpg','','','exiftool.exe','%','0','5','0'] - line_list5a = ['0','*.exe','','','exiftool.exe','%','0','5','0'] + line_list5a = ['0','*.exe','','','exiftool.exe','%','0','5','0'] cde_sklejka_list=[line_list1,line_list1a,line_list2,line_list3,line_list4,line_list4a,line_list5,line_list5a] else: @@ -1046,6 +1046,9 @@ def use_checkbutton_mod(self,e,do_configure_scan_button=True): def scan_comp_set(self): self.scan_compr_var_int.set(int(self.scan_compr_var.get())) + def scan_threads_set(self): + self.scan_threads_var_int.set(int(self.scan_threads_var.get())) + scan_dialog_created = False @restore_status_line @block @@ -1124,17 +1127,31 @@ def get_scan_dialog(self): self.scan_compr_var = IntVar() self.scan_compr_var_int = IntVar() + self.scan_threads_var = IntVar() + self.scan_threads_var_int = IntVar() + self.scan_compr_var.set(9) self.scan_compr_var_int.set(9) - (compr_in_label := Label(scan_options_frame, textvariable=self.scan_compr_var_int,width=3,bg=self.bg_color,relief='groove',borderwidth=2)).pack(side='right',padx=2,pady=2) - (compr_scale := Scale(scan_options_frame, variable=self.scan_compr_var, orient='horizontal',from_=0, to=22,command=lambda x : self.scan_comp_set(),style="TScale",length=200)).pack(fill='x',side='right',expand=1,padx=2) + self.scan_threads_var.set(1) + self.scan_threads_var_int.set(1) + (compr_label := Label(scan_options_frame, text='Compression:',bg=self.bg_color,relief='flat')).pack(side='left',padx=2,pady=2) + (compr_scale := Scale(scan_options_frame, variable=self.scan_compr_var, orient='horizontal',from_=0, to=22,command=lambda x : self.scan_comp_set(),style="TScale",length=160)).pack(fill='x',side='left',expand=1,padx=2) + (compr_in_label := Label(scan_options_frame, textvariable=self.scan_compr_var_int,width=3,bg=self.bg_color,relief='groove',borderwidth=2)).pack(side='left',padx=2,pady=2) compr_tooltip = "Data record internal compression. 
A higher value\nmeans a smaller file and longer compression time.\nvalues above 20 may result in extremely long compression\nand memory consumption. The default value is 9." self.widget_tooltip(compr_scale,compr_tooltip) self.widget_tooltip(compr_label,compr_tooltip) self.widget_tooltip(compr_in_label,compr_tooltip) + (threads_in_label := Label(scan_options_frame, textvariable=self.scan_threads_var_int,width=3,bg=self.bg_color,relief='groove',borderwidth=2)).pack(side='right',padx=2,pady=2) + (threads_scale := Scale(scan_options_frame, variable=self.scan_threads_var, orient='horizontal',from_=0, to=cpu_count(),command=lambda x : self.scan_threads_set(),style="TScale",length=160)).pack(fill='x',side='right',expand=1,padx=2) + (threads_label := Label(scan_options_frame, text='CDE Threads:',bg=self.bg_color,relief='flat')).pack(side='left',padx=2,pady=2) + threads_tooltip = "Number of threads used to extract Custom Data\n\n0 - all available CPU cores\n1 - single thread (default value)\n\nThe optimal value depends on the CPU cores performace,\nIO subsystem performance and Custom Data Extractor specifics.\n\nConsider limitations of parallel CDE execution e.g.\nnumber of licenses of used software,\nused working directory, needed memory etc." + self.widget_tooltip(threads_scale,threads_tooltip) + self.widget_tooltip(threads_label,threads_tooltip) + self.widget_tooltip(threads_in_label,threads_tooltip) + self.single_device=BooleanVar() single_device_button = Checkbutton(dialog.area_buttons,text='one device mode',variable=self.single_device) single_device_button.pack(side='right',padx=2,pady=2) @@ -3521,8 +3538,10 @@ def scan_wrapper(self): self.scanning_in_progress=True compression_level = self.scan_compr_var_int.get() + threads = self.scan_threads_var_int.get() + try: - if self.scan(compression_level,group): + if self.scan(compression_level,threads,group): self.scan_dialog_hide_wrapper() except Exception as e: l_error(f'scan_wraper: {e}') @@ -3540,7 +3559,7 @@ def scan_dialog_hide_wrapper(self): @restore_status_line @logwrapper - def scan(self,compression_level,group=None): + def scan(self,compression_level,threads,group=None): path_to_scan_from_entry = abspath(self.path_to_scan_entry_var.get()) if not path_to_scan_from_entry: @@ -3691,7 +3710,11 @@ def scan(self,compression_level,group=None): try: with open(sep.join([self.temp_dir,SCAN_DAT_FILE]), "wb") as f: - f.write(ZstdCompressor(level=8,threads=1).compress(dumps([new_label,path_to_scan_from_entry,check_dev,cde_list]))) + f.write(ZstdCompressor(level=8,threads=1).compress(dumps([new_label,path_to_scan_from_entry,check_dev,compression_level,threads,cde_list]))) + + #debug + #with open(sep.join(['/home/xy/private/essential/librer-devel/tmp1',SCAN_DAT_FILE]), "wb") as f: + # f.write(ZstdCompressor(level=8,threads=1).compress(dumps([new_label,path_to_scan_from_entry,check_dev,compression_level,threads,cde_list]))) except Exception as e: print(e) else: diff --git a/src/record.py b/src/record.py index 66c84c2..0915b0e 100644 --- a/src/record.py +++ b/src/record.py @@ -334,7 +334,7 @@ def proper_exit(code): with open(sep.join([comm_dir,SCAN_DAT_FILE]),"rb") as f: create_list = loads(ZstdDecompressor().decompress(f.read())) - label,path_to_scan,check_dev,cde_list = create_list + label,path_to_scan,check_dev,compression_level,threads,cde_list = create_list except Exception as e: print_info(f'create error:{e}') proper_exit(2) @@ -351,14 +351,14 @@ def proper_exit(code): if cde_list : try: print_func(['stage',1],True) - 
new_record.extract_customdata(print_func,abort_list) + new_record.extract_customdata(print_func,abort_list,threads_quant=threads) except Exception as cde: print_info(f'cde error:{cde}') print_func(['stage',2],True) new_record.pack_data(print_func) print_func(['stage',3],True) - new_record.save(print_func,file_path=args.file,compression_level=9) + new_record.save(print_func,file_path=args.file,compression_level=compression_level) print_func(['stage',4],True) ##################################################################### From da3fbbea6360bb70879e948f67b32a23ad596f83 Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 23 Feb 2024 18:28:50 +0100 Subject: [PATCH 02/20] layout minor mod --- src/dialogs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dialogs.py b/src/dialogs.py index 281d87f..cecccfc 100644 --- a/src/dialogs.py +++ b/src/dialogs.py @@ -384,7 +384,7 @@ def __init__(self,parent,icon,bg_color,pre_show=None,post_close=None,min_width=1 self.text_search_pool_index=0 self.find_lab=Label(self.area_mark) - self.find_lab.pack(side='right', anchor='e',padx=5,pady=5) + self.find_lab.pack(side='right', anchor='e',padx=5,pady=5,expand='yes', fill='both') try: self.find_lab.configure(text='Mark:',compound='left') From 3bc7ac8584de289a3df4bc3b934d26b6139a03e7 Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 23 Feb 2024 18:33:42 +0100 Subject: [PATCH 03/20] nuitka update --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1d7fb3b..e09884f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,4 +5,4 @@ psutil==5.9.8 pympler==1.0.1 ciso8601==2.3.1 pywin32==306; sys.platform == 'win32' -nuitka==2.0.2 +nuitka==2.0.3 From 06fccb7da28b5cebc9a28c6fbc934f9e1d6a211a Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 23 Feb 2024 20:21:02 +0100 Subject: [PATCH 04/20] better templates, CS checkbutton, RC --- src/dialogs.py | 6 +++++- src/librer.py | 24 ++++++++++++++---------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/dialogs.py b/src/dialogs.py index cecccfc..9f51111 100644 --- a/src/dialogs.py +++ b/src/dialogs.py @@ -372,6 +372,10 @@ def __init__(self,parent,icon,bg_color,pre_show=None,post_close=None,min_width=1 self.find_prev_butt=Button(self.area_mark, command=lambda : self.find_next_prev(-1), width=1) self.find_prev_butt.pack(side='right', anchor='w',padx=2,pady=5,fill='both') + self.find_cs_var=BooleanVar() + self.find_cs=Checkbutton(self.area_mark,text='C.S.',variable=self.find_cs_var, command=lambda : self.find_key_binding() ) + self.find_cs.pack(side='right', anchor='e',padx=5,pady=5,expand='yes', fill='both') + self.find_var=StringVar() self.find_entry=Entry(self.area_mark, textvariable=self.find_var, width=22) self.find_entry.pack(side='right', anchor='w',padx=2,pady=5,fill='both') @@ -459,7 +463,7 @@ def find_key_binding(self,event=None): len_search_str = len(search_str) while True: - start_index = self_text_search(search_str, start_index, 'end') + start_index = self_text_search(search_str, start_index, 'end',nocase=0 if self.find_cs_var.get() else 1) if not start_index: break end_index = f"{start_index}+{len_search_str}c" diff --git a/src/librer.py b/src/librer.py index a8449f8..ed21587 100644 --- a/src/librer.py +++ b/src/librer.py @@ -172,21 +172,21 @@ def read(self): #use,mask,smin,smax,exe,pars,shell,timeout,crc if windows: - line_list1 = ['0','*.rar,*.zip,*.xz,*.z,*.gzip,*.iso','','','C:\\Program Files\\WinRAR\\UnRAR.exe','l %','0','5','0'] - line_list1a = 
['0','*.7z,*.zip,*.bzip2,*.xz,*.z,*.gzip,*.iso,*.rar','','','C:\\Program Files\\7-Zip\\7z.exe l % | more +12','','1','5','0'] + line_list1 = ['0','*.rar,*.zip,*.cab,*.arj,*.lzh,*.uue,*.z,*.bz2,*.gzip,*.iso,*.7z','','','C:\\Program Files\\WinRAR\\UnRAR.exe','l %','0','10','0'] + line_list1a = ['0','*.7z,*.zip,*.bz2,*.xz,*.z,*.gzip,*.iso,*.rar,*.arj,*.cab,*.lzh,*.lzma,*.vdi,*.vhd','','','C:\\Program Files\\7-Zip\\7z.exe','l %','0','10','0'] line_list2 = ['0','*.txt,*.nfo','1','256kB','more %','','1','5','0'] - line_list3 = ['0','*.pls,*.m3u,*.cue','','','more %','','1','5','0'] - line_list4 = ['0','*.mp3,*.mp4,*.mpeg,*.mkv','','','ffprobe.exe -hide_banner %','','1','5','0'] + line_list3 = ['0','*.pls,*.m3u,*.cue,*.plp,*.m3u8,*.mpcpl','','','more %','','1','5','0'] + line_list4 = ['0','*.aac,*.ac3,*.aiff,*.dts,*.dtshd,*.flac,*.h261,*.h263,*.h264,*.iff,*.m4v,*.matroska,*.mpc,*.mp3,*.mp4,*.mpeg,*.mkv,*.ts,*.ogg,*.wav,*.wv','','','ffprobe.exe','-hide_banner %','0','5','0'] line_list4a = ['0','*.mp3,*.mp4,*.mpeg,*.mkv','','','MediaInfo.exe','%','0','5','0'] line_list5 = ['0','*.jpg','','','exiftool.exe','%','0','5','0'] line_list5a = ['0','*.exe','','','exiftool.exe','%','0','5','0'] cde_sklejka_list=[line_list1,line_list1a,line_list2,line_list3,line_list4,line_list4a,line_list5,line_list5a] else: - line_list1 = ['0','*.7z,*.zip,*.bzip2,*.xz,*.z,*.gzip,*.iso,*.rar','','','7z l % | tail -n+10','','1','5','0'] + line_list1 = ['0','*.7z,*.zip,*.bz2,*.xz,*.z,*.gzip,*.iso,*.rar,*.arj,*.cab,*.lzh,*.lzma,*.vdi,*.vhd','','','7z','l %','0','10','0'] line_list2 = ['0','*.txt,*.nfo','1','256kB','cat','%','0','5','0'] - line_list3 = ['0','*.pls,*.m3u,*.cue','','','cat','%','0','5','0'] - line_list4 = ['0','*.mp3,*.mp4,*.mpeg','','','ffprobe','-hide_banner %','0','5','0'] + line_list3 = ['0','*.pls,*.m3u,*.cue,*.plp,*.m3u8,*.mpcpl','','','cat','%','0','5','0'] + line_list4 = ['0','*.aac,*.ac3,*.aiff,*.dts,*.dtshd,*.flac,*.h261,*.h263,*.h264,*.iff,*.m4v,*.matroska,*.mpc,*.mp3,*.mp4,*.mpeg,*.mkv,*.ts,*.ogg,*.wav,*.wv','','','ffprobe','-hide_banner %','0','5','0'] line_list5 = ['0','*.jpg','','','exif','%','0','5','0'] cde_sklejka_list=[line_list1,line_list2,line_list3,line_list4,line_list5] @@ -515,7 +515,7 @@ def __init__(self,cwd): (status_frame := Frame(self_main,bg=self.bg_color)).pack(side='bottom', fill='both') - self.status_records_all=Label(status_frame,image=self.ico_record,text='--',width=200,borderwidth=2,bg=self.bg_color,relief='groove',anchor='w') + self.status_records_all=Label(status_frame,image=self.ico_records_all,text='--',width=200,borderwidth=2,bg=self.bg_color,relief='groove',anchor='w') self.status_records_all.pack(fill='x',expand=0,side='left') self.status_records_all_configure = lambda x : self.status_records_all.configure(image = self.ico_records_all, text = x,compound='left') self.widget_tooltip(self.status_records_all,'All records in repository') @@ -1313,8 +1313,12 @@ def fix_text_dialog(self,dialog): dialog.find_prev_butt.configure(image=self.ico_left) dialog.find_next_butt.configure(image=self.ico_right) - self.widget_tooltip(dialog.find_prev_butt,'Find Prev (Shift+F3)') - self.widget_tooltip(dialog.find_next_butt,'Find Next (F3)') + self.widget_tooltip(dialog.find_prev_butt,'Select Prev (Shift+F3)') + self.widget_tooltip(dialog.find_next_butt,'Select Next (F3)') + self.widget_tooltip(dialog.find_cs,'Case Sensitive') + self.widget_tooltip(dialog.find_info_lab,'index of the selected search result / search results total ') + + dialog.find_cs_var.set(False if windows else True) 
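# Minimal standalone sketch of the "C.S." checkbutton mechanism added in dialogs.py
# above: the flag is forwarded to tkinter Text.search() through its nocase option
# (nocase=1 ignores case, nocase=0 is case sensitive). The names demo_root,
# demo_text and find_all are hypothetical, used only for illustration.
from tkinter import Tk, Text, BooleanVar

demo_root = Tk()
demo_text = Text(demo_root)
demo_text.insert('end', 'Alpha alpha ALPHA')
case_sensitive = BooleanVar(value=False)

def find_all(pattern):
    hits, start = [], '1.0'
    while True:
        idx = demo_text.search(pattern, start, 'end',
                               nocase=0 if case_sensitive.get() else 1)
        if not idx:
            break
        hits.append(idx)
        start = f'{idx}+{len(pattern)}c'   # continue just past the current match
    return hits

print(find_all('alpha'))   # three matches while case_sensitive is False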
progress_dialog_on_scan_created = False @restore_status_line From 1ae954fd579a61bc0913509d53e816126c42278d Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 24 Feb 2024 14:34:59 +0100 Subject: [PATCH 05/20] parallel CDE improved --- src/core.py | 326 +++++++++++++++++++++++--------------------------- src/record.py | 26 ++-- 2 files changed, 162 insertions(+), 190 deletions(-) diff --git a/src/core.py b/src/core.py index d64a51e..abd85ae 100644 --- a/src/core.py +++ b/src/core.py @@ -82,7 +82,6 @@ CD_INDEX_ID = 1 CD_DATA_ID = 2 - def get_dev_labes_dict(): lsblk = subprocess_run(['lsblk','-fJ'],capture_output = True,text = True) lsblk.dict = json_loads(lsblk.stdout) @@ -232,20 +231,20 @@ def send_signal(subproc,temp_dir,kind=0): except Exception as se: print(f'subprocess signal error: {se}') -def kill_subprocess(subproc,print_func=print): +def kill_subprocess( subproc,print_func=lambda x,force=None : print(x) ): try: pid = subproc.pid if windows: kill_cmd = ['taskkill', '/F', '/T', '/PID', str(pid)] - print_func( ('info',f'killing pid: {pid}') ) + print_func( ('info',f'killing pid: {pid}'),True ) subprocess_run(kill_cmd) else: - print_func( ('info',f'killing process group of pid {pid}') ) + print_func( ('info',f'killing process group of pid {pid}'),True ) killpg(getpgid(pid), SIGTERM) except Exception as ke: - print_func( ('error',f'kill_subprocess error: {ke}') ) + print_func( ('error',f'kill_subprocess error: {ke}'),True ) def compress_with_header_update(header,data,compression,datalabel,zip_file): t0 = perf_counter() @@ -283,7 +282,7 @@ def __init__(self,label='',scan_path=''): self.files_cde_quant = 0 self.files_cde_quant_sum = 0 - self.files_cde_size_extracted = 0 + self.cde_size_extracted = 0 self.items_names=0 self.items_cd=0 @@ -293,7 +292,7 @@ def __init__(self,label='',scan_path=''): self.cde_list = [] self.files_cde_errors_quant = {} - self.files_cde_errors_quant_all = 0 + self.cde_errors_quant_all = 0 self.cde_stats_time_all = 0 self.zipinfo = {} @@ -358,6 +357,7 @@ def load(self,file_path): return False + label_of_datalabel = {'filestructure':'Filestructure','filenames':'Filenames','customdata':'Custom Data','header':'Header'} def save(self,print_func,file_path=None,compression_level=9): if file_path: filename = basename(normpath(file_path)) @@ -371,9 +371,10 @@ def save(self,print_func,file_path=None,compression_level=9): self.header.compression_level = compression_level + self_label_of_datalabel = self.label_of_datalabel with ZipFile(file_path, "w") as zip_file: def compress_with_header_update_wrapp(data,datalabel): - print_func(['save',f'compressing {datalabel}'],True) + print_func(('save',f'Compressing {self_label_of_datalabel[datalabel]} ({bytes_to_str(asizeof(data))})'),True) compress_with_header_update(self.header,data,compression_level,datalabel,zip_file) compress_with_header_update_wrapp(self.filestructure,'filestructure') @@ -405,7 +406,7 @@ def compress_with_header_update_wrapp(data,datalabel): self.prepare_info() - print_func(['save','finished'],True) + print_func(('save','finished'),True) def scan_rec(self,print_func,abort_list,path, scan_like_data,filenames_set,check_dev=True,dev_call=None) : if any(abort_list) : @@ -448,7 +449,7 @@ def scan_rec(self,print_func,abort_list,path, scan_like_data,filenames_set,check mtime = int(stat_res.st_mtime) dev=stat_res.st_dev except Exception as e: - print_func( ('error',f'stat {entry_name} error:{e}') ) + print_func( ('error',f'stat {entry_name} error:{e}'),True ) #size -1 <=> error, dev,in ==0 is_bind = False size=-1 @@ -462,7 
+463,7 @@ def scan_rec(self,print_func,abort_list,path, scan_like_data,filenames_set,check if dev_call: if dev_call!=dev: #self.log.info('devices mismatch:%s %s %s %s' % (path,entry_name,dev_call,dev) ) - print_func( ('info',f'devices mismatch:{path},{entry_name},{dev_call},{dev}') ) + print_func( ('info',f'devices mismatch:{path},{entry_name},{dev_call},{dev}'),True ) is_bind=True else: dev_call=dev @@ -507,7 +508,7 @@ def scan_rec(self,print_func,abort_list,path, scan_like_data,filenames_set,check print_func( ('scan',self_header.sum_size,self_header.quant_files,self_header.quant_folders,path) ) except Exception as e: - print_func( ('error', f'scandir {path} error:{e}') ) + print_func( ('error', f'scandir {path} error:{e}'),True ) return (local_folder_size_with_subtree+local_folder_size,subitems) @@ -537,7 +538,7 @@ def scan(self,print_func,abort_list,cde_list,check_dev=True): self.customdata_pool_index = 0 if cde_list: - print_func( ('info','estimating files pool for custom data extraction') ) + print_func( ('info','Estimating files pool for custom data extraction.'),True ) self.prepare_customdata_pool_rec(print_func,abort_list,self.scan_data,[]) def prepare_customdata_pool_rec(self,print_func,abort_list,scan_like_data,parent_path): @@ -600,84 +601,74 @@ def prepare_customdata_pool_rec(self,print_func,abort_list,scan_like_data,parent except Exception as e: #self.log.error('prepare_customdata_pool_rec error::%s',e ) #print('prepare_customdata_pool_rec',e,entry_name,size,is_dir,is_file,is_symlink,is_bind,has_files,mtime) - print_func( ('error','prepare_customdata_pool_rec:{e},{entry_name},{size},{is_dir},{is_file},{is_symlink},{is_bind},{has_files},{mtime}') ) + print_func( ('error','prepare_customdata_pool_rec:{e},{entry_name},{size},{is_dir},{is_file},{is_symlink},{is_bind},{has_files},{mtime}'),True ) - def extract_customdata(self,print_func,abort_list,threads_quant=0): + def extract_customdata(self,print_func,abort_list,threads=0): self_header = self.header scan_path = self_header.scan_path - print_func( ('info',f'custom data extraction {threads_quant=}...'),True) + print_func( ('info',f'custom data extraction {threads=}...'),True) self_header.files_cde_quant = 0 self_header.files_cde_size = 0 - self_header.files_cde_size_extracted = 0 + self_header.cde_size_extracted = 0 self_header.files_cde_errors_quant = defaultdict(int) - self_header.files_cde_errors_quant_all = 0 + self_header.cde_errors_quant_all = 0 + self_header.threads = threads + files_cde_quant_sum = self_header.files_cde_quant_sum = len(self.customdata_pool) files_cde_size_sum = self_header.files_cde_size_sum cde_list = self.header.cde_list print_func( ('cdeinit',files_cde_quant_sum,files_cde_size_sum),True) - customdata_stats_size=defaultdict(int) - customdata_stats_uniq=defaultdict(int) - customdata_stats_refs=defaultdict(int) - customdata_stats_time=defaultdict(float) - - customdata_stats_time_all=[0] - - if threads_quant==0: - threads_quant = cpu_count() + if threads==0: + threads = cpu_count() customdata_pool_per_thread = defaultdict(list) - timeout_semi_list_per_thread = { thread_index:[None] for thread_index in range(threads_quant) } - self.killed = { thread_index:False for thread_index in range(threads_quant) } + timeout_semi_list_per_thread = { thread_index:[None] for thread_index in range(threads) } + self.killed = { thread_index:False for thread_index in range(threads) } - #per_thread_customdata_dict={} thread_index = 0 for val_tuple in self.customdata_pool.values(): 
customdata_pool_per_thread[thread_index].append(val_tuple) thread_index+=1 - thread_index %= threads_quant - #per_thread_customdata_dict[thread_index]={} - - #print(f'{thread_index=}') + thread_index %= threads CD_OK_ID_loc = CD_OK_ID CD_DATA_ID_loc = CD_DATA_ID all_threads_data_list={} - #files_cde_errors_quant={} all_threads_files_cde_errors_quant = {} + all_threads_customdata_stats_time = {} - for thread_index in range(threads_quant): + for thread_index in range(threads): all_threads_data_list[thread_index]=[0,0,0,0] - #files_cde_errors_quant[thread_index]=defaultdict(int) all_threads_files_cde_errors_quant[thread_index]=defaultdict(int) + all_threads_customdata_stats_time[thread_index]=defaultdict(float) time_start_all = perf_counter() + single_thread = bool(threads==1) ############################################################# - def threaded_cde(timeout_semi_list,thread_index,thread_data_list,files_cde_errors_quant): - - #curr_per_thread_customdata_dict = per_thread_customdata_dict[thread_index] - #cd_index_per_thread = 0 + def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quant,cde_stats_time): aborted_string = 'Custom data extraction was aborted.' - #files_cde_errors_quant = defaultdict(int) - files_cde_quant = 0 files_cde_size = 0 - files_cde_size_extracted = 0 + cde_size_extracted = 0 + + cde_errors_quant_all = 0 + + perf_counter_loc = perf_counter + self_killed = self.killed - files_cde_errors_quant_all = 0 - #for (scan_like_list,subpath,rule_nr,size) in self.customdata_pool.values(): for (scan_like_list,subpath,rule_nr,size) in customdata_pool_per_thread[thread_index]: - self.killed[thread_index]=False + self_killed[thread_index]=False - time_start = perf_counter() + time_start = perf_counter_loc() if abort_list[0] : #wszystko returncode=200 output = aborted_string @@ -690,8 +681,8 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,files_cde_error full_file_path = normpath(abspath(sep.join([scan_path,subpath]))).replace('/',sep) command,command_info = get_command(executable,parameters,full_file_path,shell) - #print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_quant_sum,files_cde_size,files_cde_size_sum) ) - #print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_size) ) + if single_thread: + print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,cde_size_extracted,cde_errors_quant_all,files_cde_quant,files_cde_size) ) timeout_val=time()+timeout if timeout else None ##################################### @@ -720,7 +711,7 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,files_cde_error timeout_semi_list[0] = None break - if self.killed[thread_index]: + if self_killed[thread_index]: output_list_append('Killed.') output = '\n'.join(output_list).strip() @@ -730,25 +721,21 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,files_cde_error ##################################### - time_end = perf_counter() - customdata_stats_time[rule_nr]+=time_end-time_start + cde_stats_time[rule_nr]+=perf_counter_loc()-time_start - if returncode or self.killed[thread_index] or aborted: - files_cde_errors_quant[rule_nr]+=1 - files_cde_errors_quant_all+=1 + if returncode or self_killed[thread_index] or aborted: + cde_errors_quant[rule_nr]+=1 + cde_errors_quant_all+=1 if not aborted: files_cde_quant += 1 files_cde_size += size - 
files_cde_size_extracted += asizeof(output) + cde_size_extracted += asizeof(output) - thread_data_list[0]=files_cde_size_extracted - thread_data_list[1]=files_cde_errors_quant_all - thread_data_list[2]=files_cde_quant - thread_data_list[3]=files_cde_size + thread_data_list[0:4]=[cde_size_extracted,cde_errors_quant_all,files_cde_quant,files_cde_size] new_elem={ - CD_OK_ID_loc:bool(returncode==0 and not self.killed[thread_index] and not aborted), + CD_OK_ID_loc:bool(returncode==0 and not self_killed[thread_index] and not aborted), CD_DATA_ID_loc:(rule_nr,returncode,output) } @@ -756,88 +743,74 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,files_cde_error sys.exit() #thread - #timeout_semi_list = [None] - cde_threads = {} cde_thread_is_alive = {} any_thread_alive = True - for thread_index in range(threads_quant): - cde_threads[thread_index] = cde_thread = Thread(target = lambda : threaded_cde(timeout_semi_list_per_thread[thread_index],thread_index,all_threads_data_list[thread_index],all_threads_files_cde_errors_quant[thread_index]),daemon=True) + for thread_index in range(threads): + cde_threads[thread_index] = cde_thread = Thread(target = lambda : threaded_cde(timeout_semi_list_per_thread[thread_index],thread_index,all_threads_data_list[thread_index],all_threads_files_cde_errors_quant[thread_index],all_threads_customdata_stats_time[thread_index]),daemon=True) cde_thread.start() - #rules - files_cde_errors_quant = defaultdict(int) - while any_thread_alive: any_thread_alive = False now = time() - for thread_index in range(threads_quant): - #cde_thread = cde_threads[thread_index] - #cde_threads[thread_index] = cde_thread = Thread(target = lambda : threaded_cde(timeout_semi_list_per_thread[thread_index]),daemon=True) - #cde_thread.start() - #cde_thread_is_alive[thread_index] = cde_thread.is_alive - + for thread_index in range(threads): if cde_threads[thread_index].is_alive(): any_thread_alive = True if timeout_semi_list_per_thread[thread_index][0]: timeout_val,subprocess = timeout_semi_list_per_thread[thread_index][0] if any(abort_list) or (timeout_val and now>timeout_val): kill_subprocess(subprocess,print_func) - self.killed[thread_index]=True + self_killed[thread_index]=True abort_list[1]=False - #sleep(0.2) - #else: - #sleep(0.4) - - #print_func( ('cde',f'{full_file_path} ({bytes_to_str(size)})',size,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_size) ) - files_cde_size_extracted=0 - files_cde_errors_quant_all=0 + cde_size_extracted=0 + cde_errors_quant_all=0 files_cde_quant=0 files_cde_size=0 - for thread_index in range(threads_quant): + for thread_index in range(threads): thread_data_list = all_threads_data_list[thread_index] - files_cde_size_extracted+=thread_data_list[0] - files_cde_errors_quant_all+=thread_data_list[1] + cde_size_extracted+=thread_data_list[0] + cde_errors_quant_all+=thread_data_list[1] files_cde_quant+=thread_data_list[2] files_cde_size+=thread_data_list[3] - for rule_nr,val in all_threads_files_cde_errors_quant[thread_index].items(): - files_cde_errors_quant[rule_nr] += val - - - print_func( ('cde',f'(multithread run)',0,files_cde_size_extracted,files_cde_errors_quant_all,files_cde_quant,files_cde_size) ) - - sleep(0.4) - + if threads!=1: + print_func( ('cde','(multithread run)',0,cde_size_extracted,cde_errors_quant_all,files_cde_quant,files_cde_size) ) + sleep(0.1) - time_end_all = perf_counter() - - self_header.files_cde_errors_quant=files_cde_errors_quant - self_header.files_cde_errors_quant_all = 
files_cde_errors_quant_all + self_header.cde_errors_quant_all = cde_errors_quant_all self_header.files_cde_quant = files_cde_quant self_header.files_cde_size = files_cde_size - self_header.files_cde_size_extracted = files_cde_size_extracted - - customdata_stats_time_all[0]=time_end_all-time_start_all + self_header.cde_size_extracted = cde_size_extracted + self_header.cde_stats_time_all = perf_counter()-time_start_all - - - - print_func( ('info','custom data extraction finished.'),True) + print_func( ('info','Custom data extraction finished. Merging ...'),True) customdata_helper={} cd_index=0 self_customdata_append = self.customdata.append + files_cde_errors_quant = defaultdict(int) + customdata_stats_size=defaultdict(int) + customdata_stats_uniq=defaultdict(int) + customdata_stats_refs=defaultdict(int) + customdata_stats_time=defaultdict(float) + CD_INDEX_ID_loc = CD_INDEX_ID - for thread_index in range(threads_quant): + for thread_index in range(threads): + + for rule_nr,val in all_threads_files_cde_errors_quant[thread_index].items(): + files_cde_errors_quant[rule_nr] += val + + for rule_nr,val in all_threads_customdata_stats_time[thread_index].items(): + customdata_stats_time[rule_nr] += val + for (scan_like_list,subpath,rule_nr,size) in customdata_pool_per_thread[thread_index]: new_elem = scan_like_list[-1] cd_field = new_elem[CD_DATA_ID_loc] @@ -857,39 +830,20 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,files_cde_error customdata_stats_refs[rule_nr]+=1 - #if cd_field not in customdata_helper: - # customdata_helper[cd_field]=cd_index - # new_elem[CD_INDEX_ID_loc] = cd_index - # new_elem['cd_index']=cd_index - # cd_index+=1 - - # self_customdata_append(cd_field) - - # customdata_stats_size[rule_nr]+=asizeof(cd_field) - # customdata_stats_uniq[rule_nr]+=1 - # customdata_stats_refs[rule_nr]+=1 - #else: - # new_elem['cd_index']=customdata_helper[cd_field] - # new_elem[CD_INDEX_ID_loc]=customdata_helper[cd_field] - # customdata_stats_refs[rule_nr]+=1 - + print_func( ('info','Custom data post-processing finished.'),True) - print_func( ('info','custom data post-processing finished.'),True) - - for thread_index in range(threads_quant): + for thread_index in range(threads): cde_threads[thread_index].join() - #print(f'{customdata_helper=}') - #print(f'{self.customdata=}') - del self.customdata_pool - del customdata_helper - self.header.cde_stats_size=customdata_stats_size - self.header.cde_stats_uniq=customdata_stats_uniq - self.header.cde_stats_refs=customdata_stats_refs - self.header.cde_stats_time=customdata_stats_time - self.header.cde_stats_time_all=customdata_stats_time_all[0] + self_header.files_cde_errors_quant=files_cde_errors_quant + self_header.cde_stats_size=customdata_stats_size + self_header.cde_stats_uniq=customdata_stats_uniq + self_header.cde_stats_refs=customdata_stats_refs + self_header.cde_stats_time=customdata_stats_time + + ############################################################# def sld_recalc_rec(self,scan_like_data): @@ -1250,15 +1204,6 @@ def prepare_info(self): local_time = strftime('%Y/%m/%d %H:%M:%S',localtime_catched(self_header.creation_time)) info_list.append(f'record label : {self_header.label}') - #if file_name in self.aliases: - # info_list.append(f'record label : {self_header.label} alias:{file_name}') - #else: - - info_list.append('') - info_list.append(f'scanned path : {self_header.scan_path}') - info_list.append(f'scanned space : {bytes_to_str(self_header.sum_size)}') - info_list.append(f'scanned files : 
{fnumber(self_header.quant_files)}') - info_list.append(f'scanned folders : {fnumber(self_header.quant_folders)}') info_list.append('') info_list.append(f'creation host : {self_header.creation_host} ({self_header.creation_os})') @@ -1267,50 +1212,75 @@ def prepare_info(self): self.txtinfo_short = '\n'.join(info_list) self.txtinfo_basic = '\n'.join(info_list) - info_list.append('') - info_list.append(f'record file : {file_name} ({bytes_to_str(file_size)}, compression level:{self.header.compression_level})') - info_list.append('') - info_list.append( 'data collection times:') - info_list.append(f'filesystem : {str(round(self_header.scanning_time,2))}s') - if self_header.cde_stats_time_all: - info_list.append(f'custom data : {str(round(self_header.cde_stats_time_all,2))}s') + threads_str = None + try: + threads_str= str(self_header.threads) + except: + pass + info_list.append(f'record file : {file_name} ({bytes_to_str(file_size)}, compression level:{self.header.compression_level}, cde threads:{threads_str})') info_list.append('') - info_list.append( 'serializing and compression times:') + info_list.append(f'scanned path : {self_header.scan_path}') + info_list.append(f'scanned space : {bytes_to_str(self_header.sum_size)}') + info_list.append(f'scanned files : {fnumber(self_header.quant_files)}') + info_list.append(f'scanned folders : {fnumber(self_header.quant_folders)}') + + scanning_time_str = f'{str(round(self_header.scanning_time,2))}' + + cde_stats_time_all_str = '' + if self_header.cde_stats_time_all: + cde_stats_time_all_str = f'{str(round(self_header.cde_stats_time_all,2))}' filestructure_time = self.header.compression_time['filestructure'] filenames_time = self.header.compression_time['filenames'] customdata_time = self.header.compression_time['customdata'] - info_list.append(f'file structure : {str(round(filestructure_time,2))}s') - info_list.append(f'file names : {str(round(filenames_time,2))}s') - info_list.append(f'custom data : {str(round(customdata_time,2))}s') - info_list.append('') - info_list.append(f'custom data extraction errors : {fnumber(self_header.files_cde_errors_quant_all)}') + cde_errors = 0 + try: + #obsolete + cde_errors = self_header.files_cde_errors_quant_all + except: + pass + + try: + cde_errors = self_header.cde_errors_quant_all + except: + pass + info_list.append('') - info_list.append( 'internal sizes : compressed serialized original items references CDE time CDE errors') - info_list.append('') + info_list.append('----------------+------------------------------------------------------------------------------------------------') + info_list.append('Internals | compressed serialized original items references read time compr.time CDE errors') + info_list.append('----------------+------------------------------------------------------------------------------------------------') h_data = self_header.zipinfo["header"] fs_data = self_header.zipinfo["filestructure"] fn_data = self_header.zipinfo["filenames"] cd_data = self_header.zipinfo["customdata"] - info_list.append(f'header :{bytes_to_str_mod(h_data[0]).rjust(12) }{bytes_to_str_mod(h_data[1]).rjust(12) }{bytes_to_str_mod(h_data[2]).rjust(12) }') - info_list.append(f'filestructure :{bytes_to_str_mod(fs_data[0]).rjust(12) }{bytes_to_str_mod(fs_data[1]).rjust(12) }{bytes_to_str_mod(fs_data[2]).rjust(12) }') - info_list.append(f'file names :{bytes_to_str_mod(fn_data[0]).rjust(12) }{bytes_to_str_mod(fn_data[1]).rjust(12) }{bytes_to_str_mod(fn_data[2]).rjust(12) }{fnumber(self_header.items_names).rjust(12) 
}{fnumber(self_header.references_names).rjust(12)}') + info_list.append(f'Header |{bytes_to_str_mod(h_data[0]).rjust(12) }{bytes_to_str_mod(h_data[1]).rjust(12) }{bytes_to_str_mod(h_data[2]).rjust(12) }') + info_list.append(f'Filestructure |{bytes_to_str_mod(fs_data[0]).rjust(12) }{bytes_to_str_mod(fs_data[1]).rjust(12) }{bytes_to_str_mod(fs_data[2]).rjust(12) }{"".rjust(12)}{"".rjust(12)}{scanning_time_str.rjust(11)}s{str(round(filestructure_time,2)).rjust(11)}s') + info_list.append(f'File Names |{bytes_to_str_mod(fn_data[0]).rjust(12) }{bytes_to_str_mod(fn_data[1]).rjust(12) }{bytes_to_str_mod(fn_data[2]).rjust(12) }{fnumber(self_header.items_names).rjust(12) }{fnumber(self_header.references_names).rjust(12)}{"".rjust(12)}{str(round(filenames_time,2)).rjust(11)}s') if cd_data[0]: - info_list.append(f'custom data :{bytes_to_str_mod(cd_data[0]).rjust(12) }{bytes_to_str_mod(cd_data[1]).rjust(12) }{bytes_to_str_mod(cd_data[2]).rjust(12) }{fnumber(self_header.items_cd).rjust(12) }{fnumber(self_header.references_cd).rjust(12)}') + info_list.append(f'Custom Data |{bytes_to_str_mod(cd_data[0]).rjust(12) }{bytes_to_str_mod(cd_data[1]).rjust(12) }{bytes_to_str_mod(cd_data[2]).rjust(12) }{fnumber(self_header.items_cd).rjust(12) }{fnumber(self_header.references_cd).rjust(12)}{cde_stats_time_all_str.rjust(11)}s{str(round(customdata_time,2)).rjust(11)}s{fnumber(cde_errors).rjust(12)}') + + try: + if self_header.cde_list: + info_list.append('----------------+------------------------------------------------------------------------------------------------') + for nr,(expressions,use_smin,smin_int,use_smax,smax_int,executable,parameters,shell,timeout,crc) in enumerate(self_header.cde_list): + info_list.append(f'rule nr {str(nr).rjust(2)} | {bytes_to_str(self_header.cde_stats_size[nr]).rjust(12)}{fnumber(self_header.cde_stats_uniq[nr]).rjust(12)}{fnumber(self_header.cde_stats_refs[nr]).rjust(12)}{str(round(self_header.cde_stats_time[nr],2)).rjust(11)}s{"".rjust(12)}{fnumber(self_header.files_cde_errors_quant[nr]).rjust(12)}') + info_list.append('----------------+------------------------------------------------------------------------------------------------') + except Exception as EE: + info_list.append(str(EE)) info_list.append('') try: if self_header.cde_list: - info_list.append('\nCustom data with details about the rules:') + info_list.append('Custom Data Extractors and rules:') for nr,(expressions,use_smin,smin_int,use_smax,smax_int,executable,parameters,shell,timeout,crc) in enumerate(self_header.cde_list): - info_list.append(f'\nrule nr : {nr} {bytes_to_str(self_header.cde_stats_size[nr]).rjust(12)}{fnumber(self_header.cde_stats_uniq[nr]).rjust(12)}{fnumber(self_header.cde_stats_refs[nr]).rjust(12)}{str(round(self_header.cde_stats_time[nr],2)).rjust(12)}s{fnumber(self_header.files_cde_errors_quant[nr]).rjust(11)}') + info_list.append(f'\nrule nr : {nr}') expressions_expanded = ','.join(list(expressions)) info_list.append(f'files : {expressions_expanded}') @@ -1324,11 +1294,9 @@ def prepare_info(self): if timeout: info_list.append(f'timeout : {timeout}s') - except Exception as EE: info_list.append(str(EE)) - info_list.append('') loaded_fs_info = 'filesystem - ' + ('loaded' if self.decompressed_filestructure else 'not loaded yet') loaded_cd_info = 'custom data - ' + ('not present' if not bool(cd_data[0]) else 'loaded' if self.decompressed_customdata else 'not loaded yet') @@ -2184,7 +2152,7 @@ def repack_record(self,record,new_label,new_compression,keep_cd,update_callback, if compression_change: 
data_filenames = loads(dec_dec(src_zip_file.read('filenames'))) - self.info_line = f'compressing filenames' + self.info_line = f'Compressing Filenames ({bytes_to_str(asizeof(data_filenames))})' compress_with_header_update(new_header,data_filenames,new_compression,'filenames',zip_file) else: zip_file.writestr('filenames',src_zip_file.read('filenames')) @@ -2192,16 +2160,16 @@ def repack_record(self,record,new_label,new_compression,keep_cd,update_callback, if keep_cd!=bool(record.header.items_cd): data_filestructure = record.remove_cd_rec(loads(dec_dec(src_zip_file.read('filestructure')))) - self.info_line = f'compressing filestructure' + self.info_line = f'Compressing Filestructure ({bytes_to_str(asizeof(data_filestructure))})' compress_with_header_update(new_header,data_filestructure,new_compression,'filestructure',zip_file) new_header.zipinfo["customdata"]=(0,0,0) - new_header.files_cde_size_extracted = 0 + new_header.cde_size_extracted = 0 new_header.items_cd=0 new_header.references_cd = 0 new_header.cde_list = [] new_header.files_cde_errors_quant = {} - new_header.files_cde_errors_quant_all = 0 + new_header.cde_errors_quant_all = 0 new_header.cde_stats_time_all = 0 new_header.compression_time['customdata']=0 @@ -2213,13 +2181,13 @@ def repack_record(self,record,new_label,new_compression,keep_cd,update_callback, else: data_filestructure = loads(dec_dec(src_zip_file.read('filestructure'))) - self.info_line = f'compressing filestructure' + self.info_line = f'compressing Filestructure ({bytes_to_str(asizeof(data_filestructure))})' compress_with_header_update(new_header,data_filestructure,new_compression,'filestructure',zip_file) if header.items_cd: data_customdata = loads(dec_dec(src_zip_file.read('customdata'))) - self.info_line = f'compressing customdata' + self.info_line = f'Compressing Custom Data ({bytes_to_str(asizeof(data_customdata))})' compress_with_header_update(new_header,data_customdata,new_compression,'customdata',zip_file) header_ser = dumps(new_header) @@ -2296,8 +2264,8 @@ def find_results_clean(self): stdout_info_line_current = '' stdout_cde_size = 0 - stdout_files_cde_size_extracted=0 - stdout_files_cde_errors_quant_all=0 + stdout_cde_size_extracted=0 + stdout_cde_errors_quant_all=0 stdout_files_cde_quant=0 stdout_files_cde_quant_sum=0 stdout_files_cde_size=0 @@ -2327,8 +2295,8 @@ def create_new_record(self,temp_dir,update_callback,group=None): self.stage = 0 - self.stdout_files_cde_size_extracted=0 - self.stdout_files_cde_errors_quant_all=0 + self.stdout_cde_size_extracted=0 + self.stdout_cde_errors_quant_all=0 self.stdout_files_cde_quant=0 self.stdout_files_cde_quant_sum=0 self.stdout_files_cde_size=0 @@ -2336,7 +2304,7 @@ def create_new_record(self,temp_dir,update_callback,group=None): def threaded_run(command,results_semi_list,info_semi_list,processes_semi_list): command_str = ' '.join(command) - print(f'create_new_record - threaded_run {command_str=}') + try: subprocess = uni_popen(command,stdin=PIPE) except Exception as re: @@ -2378,8 +2346,8 @@ def threaded_run(command,results_semi_list,info_semi_list,processes_semi_list): self.stdout_info_line_current = val[1] self.stdout_cde_size = val[2] - self.stdout_files_cde_size_extracted=val[3] - self.stdout_files_cde_errors_quant_all=val[4] + self.stdout_cde_size_extracted=val[3] + self.stdout_cde_errors_quant_all=val[4] self.stdout_files_cde_quant=val[5] self.stdout_files_cde_size=val[6] else: diff --git a/src/record.py b/src/record.py index 0915b0e..4616af8 100644 --- a/src/record.py +++ b/src/record.py @@ -126,17 +126,21 
@@ def caretaker(signal_file): sys_stdout_flush = sys.stdout.flush lines_non_stop=0 + json_dumps_loc = json_dumps + stdout_data_queue_loc = stdout_data_queue + path_exists_loc = path_exists + def flush_last_data_not_printed(flush): nonlocal last_data_not_printed if last_data_not_printed: - print(json_dumps(last_data_not_printed),flush=flush) + print(json_dumps_loc(last_data_not_printed),flush=flush) last_data_not_printed=None while True: now=perf_counter() now_grater_than_next_time_print = bool(now>next_time_print) - if stdout_data_queue: + if stdout_data_queue_loc: data,always=stdout_data_queue_get() if data==True: @@ -146,7 +150,7 @@ def flush_last_data_not_printed(flush): flush_last_data_not_printed(False) if always or now_grater_than_next_time_print: - print(json_dumps(data),flush=True) + print(json_dumps_loc(data),flush=True) next_time_print=now+print_min_time_period lines_non_stop+=1 last_data_not_printed=None @@ -163,7 +167,7 @@ def flush_last_data_not_printed(flush): if now>next_signal_file_check: next_signal_file_check=now+signal_file_check_period - if path_exists(signal_file): + if path_exists_loc(signal_file): try: with open(signal_file,'r') as sf: got_int = int(sf.read().strip()) @@ -174,7 +178,7 @@ def flush_last_data_not_printed(flush): except Exception as pe: print_info(f'check_abort error:{pe}') - sleep(0.01) + sleep(0.001) sys.exit(0) #thread @@ -342,7 +346,7 @@ def proper_exit(code): new_record = LibrerRecord(label=label,scan_path=path_to_scan) try: - print_func(['stage',0],True) + print_func(('stage',0),True) new_record.scan(print_func,abort_list,tuple(cde_list),check_dev) except Exception as fe: print_info(f'scan error:{fe}') @@ -350,16 +354,16 @@ def proper_exit(code): if not abort_list[0]: if cde_list : try: - print_func(['stage',1],True) - new_record.extract_customdata(print_func,abort_list,threads_quant=threads) + print_func(('stage',1),True) + new_record.extract_customdata(print_func,abort_list,threads=threads) except Exception as cde: print_info(f'cde error:{cde}') - print_func(['stage',2],True) + print_func(('stage',2),True) new_record.pack_data(print_func) - print_func(['stage',3],True) + print_func(('stage',3),True) new_record.save(print_func,file_path=args.file,compression_level=compression_level) - print_func(['stage',4],True) + print_func(('stage',4),True) ##################################################################### else: From b2a5dd97abb1ad9d2770973d72e2b25da68f1a26 Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 24 Feb 2024 14:36:49 +0100 Subject: [PATCH 06/20] better templates, improvements --- src/librer.py | 83 +++++++++++++++++++++++++++++++++++---------------- 1 file changed, 57 insertions(+), 26 deletions(-) diff --git a/src/librer.py b/src/librer.py index ed21587..2961bce 100644 --- a/src/librer.py +++ b/src/librer.py @@ -28,7 +28,7 @@ from os import sep,system,getcwd,name as os_name,cpu_count from os.path import abspath,normpath,dirname,join as path_join,isfile as path_isfile -from gc import disable as gc_disable, enable as gc_enable,collect as gc_collect +from gc import disable as gc_disable, enable as gc_enable,collect as gc_collect,set_threshold as gc_set_threshold, get_threshold as gc_get_threshold from pathlib import Path from time import strftime,time,mktime @@ -172,8 +172,8 @@ def read(self): #use,mask,smin,smax,exe,pars,shell,timeout,crc if windows: - line_list1 = ['0','*.rar,*.zip,*.cab,*.arj,*.lzh,*.uue,*.z,*.bz2,*.gzip,*.iso,*.7z','','','C:\\Program Files\\WinRAR\\UnRAR.exe','l %','0','10','0'] - line_list1a = 
['0','*.7z,*.zip,*.bz2,*.xz,*.z,*.gzip,*.iso,*.rar,*.arj,*.cab,*.lzh,*.lzma,*.vdi,*.vhd','','','C:\\Program Files\\7-Zip\\7z.exe','l %','0','10','0'] + line_list1 = ['0','*.7z,*.zip,*.bz2,*.xz,*.z,*.gzip,*.iso,*.rar,*.arj,*.lzh,*.lzma,*.vdi,*.vhd','','','C:\\Program Files\\7-Zip\\7z.exe','l %','0','10','0'] + line_list1a = ['0','*.rar','','','C:\\Program Files\\WinRAR\\UnRAR.exe','l %','0','10','0'] line_list2 = ['0','*.txt,*.nfo','1','256kB','more %','','1','5','0'] line_list3 = ['0','*.pls,*.m3u,*.cue,*.plp,*.m3u8,*.mpcpl','','','more %','','1','5','0'] line_list4 = ['0','*.aac,*.ac3,*.aiff,*.dts,*.dtshd,*.flac,*.h261,*.h263,*.h264,*.iff,*.m4v,*.matroska,*.mpc,*.mp3,*.mp4,*.mpeg,*.mkv,*.ts,*.ogg,*.wav,*.wv','','','ffprobe.exe','-hide_banner %','0','5','0'] @@ -183,7 +183,7 @@ def read(self): cde_sklejka_list=[line_list1,line_list1a,line_list2,line_list3,line_list4,line_list4a,line_list5,line_list5a] else: - line_list1 = ['0','*.7z,*.zip,*.bz2,*.xz,*.z,*.gzip,*.iso,*.rar,*.arj,*.cab,*.lzh,*.lzma,*.vdi,*.vhd','','','7z','l %','0','10','0'] + line_list1 = ['0','*.7z,*.zip,*.bz2,*.xz,*.z,*.gzip,*.iso,*.rar,*.arj,*.lzh,*.lzma,*.vdi,*.vhd','','','7z','l %','0','10','0'] line_list2 = ['0','*.txt,*.nfo','1','256kB','cat','%','0','5','0'] line_list3 = ['0','*.pls,*.m3u,*.cue,*.plp,*.m3u8,*.mpcpl','','','cat','%','0','5','0'] line_list4 = ['0','*.aac,*.ac3,*.aiff,*.dts,*.dtshd,*.flac,*.h261,*.h263,*.h264,*.iff,*.m4v,*.matroska,*.mpc,*.mp3,*.mp4,*.mpeg,*.mkv,*.ts,*.ogg,*.wav,*.wv','','','ffprobe','-hide_banner %','0','5','0'] @@ -3570,7 +3570,7 @@ def scan(self,compression_level,threads,group=None): self.get_info_dialog_on_scan().show('Error. No paths to scan.','Add paths to scan.') return False - #wryfikacja + #weryfikacja for e in range(self.CDE_ENTRIES_MAX): if self.CDE_use_var_list[e].get(): mask = self.CDE_mask_var_list[e].get().strip() @@ -3590,6 +3590,24 @@ def scan(self,compression_level,threads,group=None): command,command_info = get_command(executable,parameters,'dummy full_file_path',shell) + all_timeout_set = True + for e in range(self.CDE_ENTRIES_MAX): + if self.CDE_use_var_list[e].get(): + timeout = self.CDE_timeout_var_list[e].get().strip() + + try: + timeout_int = int(timeout) + except: + all_timeout_set = False + + if not all_timeout_set: + ask_dialog = self.get_text_ask_dialog_on_scan() + ask_dialog.show('CDE Timeout not set?','Continue without Custom Data Extractor timeout ?') + + if not ask_dialog.res_bool: + return False + + self.last_dir = path_to_scan_from_entry new_label = self.scan_label_entry_var.get() @@ -3717,7 +3735,7 @@ def scan(self,compression_level,threads,group=None): f.write(ZstdCompressor(level=8,threads=1).compress(dumps([new_label,path_to_scan_from_entry,check_dev,compression_level,threads,cde_list]))) #debug - #with open(sep.join(['/home/xy/private/essential/librer-devel/tmp1',SCAN_DAT_FILE]), "wb") as f: + #with open(sep.join(['./tmp1',SCAN_DAT_FILE]), "wb") as f: # f.write(ZstdCompressor(level=8,threads=1).compress(dumps([new_label,path_to_scan_from_entry,check_dev,compression_level,threads,cde_list]))) except Exception as e: print(e) @@ -3795,16 +3813,20 @@ def scan(self,compression_level,threads,group=None): if not switch_done: self_progress_dialog_on_scan.widget.title('Creating new data record (Custom Data Extraction)') self_progress_dialog_on_scan.abort_single_button.pack(side='left', anchor='center',padx=5,pady=5) - self_progress_dialog_on_scan.abort_single_button.configure(image=self.ico_abort,text='Abort single 
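The new pre-scan check above only treats a CDE rule's timeout as set when the field parses as an integer; otherwise the user is asked whether to continue without a Custom Data Extractor timeout. The same validation, reduced to a standalone helper with hypothetical inputs:

def every_enabled_rule_has_timeout(rules):
    # rules: iterable of (enabled, timeout_text); False if any enabled rule lacks an integer timeout
    for enabled, timeout_text in rules:
        if not enabled:
            continue
        try:
            int(timeout_text.strip())
        except (ValueError, AttributeError):
            return False
    return True

print(every_enabled_rule_has_timeout([(True, '10'), (False, ''), (True, ' 5 ')]))  # True
print(every_enabled_rule_has_timeout([(True, '')]))                                # False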
file',compound='left',width=15,command=lambda : self.abort_single(),state='normal') + + if threads==1: + self_progress_dialog_on_scan.abort_single_button.configure(image=self.ico_abort,text='Abort single file',compound='left',width=15,command=lambda : self.abort_single(),state='normal') + else: + self_progress_dialog_on_scan.abort_single_button.configure(image=self.ico_abort,text='Abort single file',compound='left',width=15,state='disabled') self_progress_dialog_on_scan.abort_button.configure(image=self.ico_abort,text='Abort',compound='left',width=15,state='normal') self_tooltip_message[str_self_progress_dialog_on_scan_abort_button]='If you abort at this stage,\nCustom Data will be incomplete.' - self_tooltip_message[str_self_progress_dialog_on_scan_abort_single_button]='Use if CDE has no timeout set and seems like stuck.\nCD of only single file will be incomplete.\nCDE will continue.' + self_tooltip_message[str_self_progress_dialog_on_scan_abort_single_button]='Use if CDE has no timeout set and seems like stuck.\nCD of only single file will be incomplete.\nCDE will continue.\n\nAvailable only for single thread mode.' switch_done=True - change3 = self_progress_dialog_on_scan_update_lab_text(3,'Extracted Custom Data: ' + local_bytes_to_str(librer_core.stdout_files_cde_size_extracted) ) - change4 = self_progress_dialog_on_scan_update_lab_text(4,'Extraction Errors : ' + fnumber(librer_core.stdout_files_cde_errors_quant_all) ) + change3 = self_progress_dialog_on_scan_update_lab_text(3,'Extracted Custom Data: ' + local_bytes_to_str(librer_core.stdout_cde_size_extracted) ) + change4 = self_progress_dialog_on_scan_update_lab_text(4,'Extraction Errors : ' + fnumber(librer_core.stdout_cde_errors_quant_all) ) files_q = librer_core.stdout_files_cde_quant @@ -3817,24 +3839,11 @@ def scan(self,compression_level,threads,group=None): self_progress_dialog_on_scan_lab_r1_config(text=f'{local_bytes_to_str(librer_core.stdout_files_cde_size)} / {local_bytes_to_str(librer_core.stdout_files_cde_size_sum)}') self_progress_dialog_on_scan_lab_r2_config(text=f'{fnumber(files_q)} / {fnumber(librer_core.stdout_files_cde_quant_sum)}') - if change3 or change4: - time_to_show_busy_sign=now+1.0 - - if update_once: - update_once=False - self_progress_dialog_on_scan_update_lab_image(2,self_ico_empty) - self_progress_dialog_on_scan_update_lab_text(0,'') - else : - if now>time_to_show_busy_sign: - if len(librer_core.stdout_info_line_current)>50: - change0 = self_progress_dialog_on_scan_update_lab_text(0,f'...{librer_core.stdout_info_line_current[-50:]}') - else: - change0 = self_progress_dialog_on_scan_update_lab_text(0,librer_core.stdout_info_line_current) - - self_progress_dialog_on_scan_update_lab_image(2,self_get_hg_ico()) - update_once=True else: + change3 = False + change4 = False + self_progress_dialog_on_scan.abort_button.configure(state='disabled') self_progress_dialog_on_scan.abort_single_button.configure(state='disabled') @@ -3843,6 +3852,25 @@ def scan(self,compression_level,threads,group=None): else: change0 = self_progress_dialog_on_scan_update_lab_text(0,librer_core.stdout_info_line_current) + ############################################### + if change3 or change4: + time_to_show_busy_sign=now+1.0 + + if update_once: + update_once=False + self_progress_dialog_on_scan_update_lab_image(2,self_ico_empty) + self_progress_dialog_on_scan_update_lab_text(0,'') + else : + if now>time_to_show_busy_sign: + if len(librer_core.stdout_info_line_current)>50: + change0 = 
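The relocated block above effectively debounces the hourglass icon: any change of the extracted-size or error counters pushes a deadline one second ahead, and the busy animation only appears once that deadline passes with no further change. A compact sketch of the same idea (class name is illustrative):

from time import monotonic

class BusySign:
    # report 'busy' only after `grace` seconds without visible progress
    def __init__(self, grace=1.0):
        self.grace = grace
        self.deadline = monotonic() + grace
    def progressed(self):
        self.deadline = monotonic() + self.grace   # counters changed: postpone the busy sign
    def should_show(self):
        return monotonic() > self.deadline

sign = BusySign()
sign.progressed()
print(sign.should_show())   # False right after progress was observed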
self_progress_dialog_on_scan_update_lab_text(0,f'...{librer_core.stdout_info_line_current[-50:]}') + else: + change0 = self_progress_dialog_on_scan_update_lab_text(0,librer_core.stdout_info_line_current) + + self_progress_dialog_on_scan_update_lab_image(2,self_get_hg_ico()) + update_once=True + ############################################### + except Exception as e: print(e) l_error(e) @@ -4558,6 +4586,9 @@ def show_homepage(self): if __name__ == "__main__": try: + allocs, g1, g2 = gc_get_threshold() + gc_set_threshold(100_000, g1*5, g2*10) + LIBRER_FILE = normpath(__file__) LIBRER_DIR = dirname(LIBRER_FILE) From 05b9e90d400251aadc464c45c19138f2fb0942bb Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 24 Feb 2024 15:42:29 +0100 Subject: [PATCH 07/20] better ask dialog, fix scan on abort --- src/core.py | 4 ++-- src/librer.py | 17 +++++++++++++++-- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/core.py b/src/core.py index abd85ae..76ad65e 100644 --- a/src/core.py +++ b/src/core.py @@ -751,6 +751,8 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan cde_threads[thread_index] = cde_thread = Thread(target = lambda : threaded_cde(timeout_semi_list_per_thread[thread_index],thread_index,all_threads_data_list[thread_index],all_threads_files_cde_errors_quant[thread_index],all_threads_customdata_stats_time[thread_index]),daemon=True) cde_thread.start() + self_killed = self.killed + while any_thread_alive: any_thread_alive = False now = time() @@ -843,8 +845,6 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan self_header.cde_stats_refs=customdata_stats_refs self_header.cde_stats_time=customdata_stats_time - - ############################################################# def sld_recalc_rec(self,scan_like_data): new_size_on_this_level = 0 diff --git a/src/librer.py b/src/librer.py index 2961bce..fa643a7 100644 --- a/src/librer.py +++ b/src/librer.py @@ -1398,6 +1398,19 @@ def get_text_ask_dialog_on_scan(self): return self.text_ask_dialog_on_scan + ask_dialog_on_scan_created = False + @restore_status_line + @block + def get_ask_dialog_on_scan(self): + if not self.ask_dialog_on_scan_created: + self.status("Creating dialog ...") + + self.ask_dialog_on_scan = LabelDialogQuestion(self.scan_dialog.widget,self.main_icon_tuple,self.bg_color,pre_show=lambda new_widget: self.pre_show(on_main_window_dialog=False,new_widget=new_widget),post_close=lambda : self.post_close(on_main_window_dialog=False),image=self.ico_warning) + + self.ask_dialog_on_scan_created = True + + return self.ask_dialog_on_scan + text_ask_dialog_on_main_created = False @restore_status_line @block @@ -3601,8 +3614,8 @@ def scan(self,compression_level,threads,group=None): all_timeout_set = False if not all_timeout_set: - ask_dialog = self.get_text_ask_dialog_on_scan() - ask_dialog.show('CDE Timeout not set?','Continue without Custom Data Extractor timeout ?') + ask_dialog = self.get_ask_dialog_on_scan() + ask_dialog.show('CDE Timeout not set','Continue without Custom Data Extractor timeout ?') if not ask_dialog.res_bool: return False From 76004f77081341a2363f324c4fdee4f1bca904ec Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 24 Feb 2024 20:17:09 +0100 Subject: [PATCH 08/20] memory leak fix, find dialog better bindings --- src/core.py | 19 ++++++---- src/librer.py | 99 +++++++++++++++++++++++++++++++++++++++------------ 2 files changed, 89 insertions(+), 29 deletions(-) diff --git a/src/core.py b/src/core.py index 76ad65e..6eb4248 100644 --- a/src/core.py +++ 
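The startup hunk reads the interpreter's cyclic-GC thresholds and raises them sharply (generation 0 to 100 000 allocations, the older generations by 5x and 10x), so the collector interrupts object-heavy loading far less often. The calls are the standard gc module API; with CPython's usual defaults this gives:

import gc

allocs, gen1, gen2 = gc.get_threshold()        # CPython defaults are (700, 10, 10)
gc.set_threshold(100_000, gen1 * 5, gen2 * 10)
print(gc.get_threshold())                      # -> (100000, 50, 100) with the defaults above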
b/src/core.py @@ -1366,11 +1366,10 @@ def decompress_filestructure(self): with ZipFile(self.file_path, "r") as zip_file: decompressor = ZstdDecompressor() - filestructure_ser = decompressor.decompress(zip_file.read('filestructure')) - self.filestructure = loads( filestructure_ser ) + self.filestructure = loads( decompressor.decompress(zip_file.read('filestructure')) ) + self.filenames = loads( decompressor.decompress(zip_file.read('filenames')) ) - filenames_ser = decompressor.decompress(zip_file.read('filenames')) - self.filenames = loads(filenames_ser) + del decompressor self.decompressed_filestructure = True self.prepare_info() @@ -1381,6 +1380,8 @@ def decompress_filestructure(self): def unload_filestructure(self): self.decompressed_filestructure = False + del self.filestructure + gc_collect() self.filestructure = () self.prepare_info() @@ -1388,13 +1389,15 @@ def unload_filestructure(self): def decompress_customdata(self): if not self.decompressed_customdata: with ZipFile(self.file_path, "r") as zip_file: + decompressor = ZstdDecompressor() try: - customdata_ser_comp = zip_file.read('customdata') - customdata_ser = ZstdDecompressor().decompress(customdata_ser_comp) - self.customdata = loads( customdata_ser ) + self.customdata = loads( decompressor.decompress( zip_file.read('customdata') ) ) except: self.customdata = [] + del decompressor + gc_collect() + self.decompressed_customdata = True self.prepare_info() @@ -1404,6 +1407,8 @@ def decompress_customdata(self): def unload_customdata(self): self.decompressed_customdata = False + del self.customdata + gc_collect() self.customdata = [] self.prepare_info() diff --git a/src/librer.py b/src/librer.py index fa643a7..f4d0b8b 100644 --- a/src/librer.py +++ b/src/librer.py @@ -1661,17 +1661,33 @@ def ver_number(var): (find_filename_frame := LabelFrame(sfdma,text='Search range',bd=2,bg=self.bg_color,takefocus=False)).grid(row=0,column=0,sticky='news',padx=4,pady=4) self.find_range_cb1 = Radiobutton(find_filename_frame,text='Selected record / group',variable=self.find_range_all,value=False,command=self.find_mod) self.find_range_cb1.grid(row=0, column=0, sticky='news',padx=4,pady=4) + self.find_range_cb1.bind('', lambda event : self.find_items()) - (find_range_cb2 := Radiobutton(find_filename_frame,text='All records',variable=self.find_range_all,value=True,command=self.find_mod)).grid(row=0, column=1, sticky='news',padx=4,pady=4) + + find_range_cb2 = Radiobutton(find_filename_frame,text='All records',variable=self.find_range_all,value=True,command=self.find_mod) + find_range_cb2.grid(row=0, column=1, sticky='news',padx=4,pady=4) + find_range_cb2.bind('', lambda event : self.find_items()) (find_filename_frame := LabelFrame(sfdma,text='Path elements',bd=2,bg=self.bg_color,takefocus=False)).grid(row=1,column=0,sticky='news',padx=4,pady=4) - Radiobutton(find_filename_frame,text="Don't use this criterion",variable=self.find_filename_search_kind_var,value='dont',command=self.find_mod,width=30).grid(row=0, column=0, sticky='news',padx=4,pady=4) - Radiobutton(find_filename_frame,text="files with error on access",variable=self.find_filename_search_kind_var,value='error',command=self.find_mod) + r_dont = Radiobutton(find_filename_frame,text="Don't use this criterion",variable=self.find_filename_search_kind_var,value='dont',command=self.find_mod,width=30) + r_dont.grid(row=0, column=0, sticky='news',padx=4,pady=4) + r_dont.bind('', lambda event : self.find_items()) + + #Radiobutton(find_filename_frame,text="files with error on 
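The memory-leak fix above reuses one ZstdDecompressor, feeds its output straight into loads() without keeping the intermediate serialized blobs, and on unload deletes the large attribute and runs gc_collect() before installing the empty placeholder. A minimal self-contained version of that load/unload pattern (class and member names are illustrative; pickle stands in for core.py's serializer):

import gc
from pickle import dumps, loads
from zipfile import ZipFile
from zstandard import ZstdCompressor, ZstdDecompressor

class LazyMember:
    def __init__(self, path, name):
        self.path, self.name = path, name
        self.data = ()
        self.loaded = False
    def load(self):
        if not self.loaded:
            with ZipFile(self.path) as zf:
                decompressor = ZstdDecompressor()
                self.data = loads(decompressor.decompress(zf.read(self.name)))
            self.loaded = True
        return self.data
    def unload(self):
        self.loaded = False
        del self.data        # drop the reference to the big structure first
        gc.collect()         # then collect, as the patch does, before re-assigning the placeholder
        self.data = ()

# usage sketch
with ZipFile('rec.dat', 'w') as zf:
    zf.writestr('filestructure', ZstdCompressor().compress(dumps({'a': 1})))
member = LazyMember('rec.dat', 'filestructure')
print(member.load())
member.unload()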
access",variable=self.find_filename_search_kind_var,value='error',command=self.find_mod) #.grid(row=1, column=0, sticky='news',padx=4,pady=4) - (regexp_radio_name:=Radiobutton(find_filename_frame,text="by regular expression",variable=self.find_filename_search_kind_var,value='regexp',command=self.find_mod)).grid(row=2, column=0, sticky='news',padx=4,pady=4) - (glob_radio_name:=Radiobutton(find_filename_frame,text="by glob pattern",variable=self.find_filename_search_kind_var,value='glob',command=self.find_mod)).grid(row=3, column=0, sticky='news',padx=4,pady=4) - (fuzzy_radio_name:=Radiobutton(find_filename_frame,text="by fuzzy match",variable=self.find_filename_search_kind_var,value='fuzzy',command=self.find_mod)).grid(row=4, column=0, sticky='news',padx=4,pady=4) + + regexp_radio_name=Radiobutton(find_filename_frame,text="by regular expression",variable=self.find_filename_search_kind_var,value='regexp',command=self.find_mod) + regexp_radio_name.grid(row=2, column=0, sticky='news',padx=4,pady=4) + regexp_radio_name.bind('', lambda event : self.find_items()) + + glob_radio_name=Radiobutton(find_filename_frame,text="by glob pattern",variable=self.find_filename_search_kind_var,value='glob',command=self.find_mod) + glob_radio_name.grid(row=3, column=0, sticky='news',padx=4,pady=4) + glob_radio_name.bind('', lambda event : self.find_items()) + + fuzzy_radio_name=Radiobutton(find_filename_frame,text="by fuzzy match",variable=self.find_filename_search_kind_var,value='fuzzy',command=self.find_mod) + fuzzy_radio_name.grid(row=4, column=0, sticky='news',padx=4,pady=4) + fuzzy_radio_name.bind('', lambda event : self.find_items()) regexp_tooltip = "Regular expression\n" regexp_tooltip_name = "Checked on the file\nor folder name." @@ -1720,13 +1736,33 @@ def ver_number(var): (find_cd_frame := LabelFrame(sfdma,text='Custom Data',bd=2,bg=self.bg_color,takefocus=False)).grid(row=2,column=0,sticky='news',padx=4,pady=4) - Radiobutton(find_cd_frame,text="Don't use this criterion",variable=self.find_cd_search_kind_var,value='dont',command=self.find_mod,width=30).grid(row=0, column=0, sticky='news',padx=4,pady=4) - Radiobutton(find_cd_frame,text="files without Custom Data ",variable=self.find_cd_search_kind_var,value='without',command=self.find_mod).grid(row=1, column=0, sticky='news',padx=4,pady=4) - Radiobutton(find_cd_frame,text="files with any correct Custom Data ",variable=self.find_cd_search_kind_var,value='any',command=self.find_mod).grid(row=2, column=0, sticky='news',padx=4,pady=4) - Radiobutton(find_cd_frame,text="files with error on CD extraction",variable=self.find_cd_search_kind_var,value='error',command=self.find_mod).grid(row=3, column=0, sticky='news',padx=4,pady=4) - (regexp_radio_cd:=Radiobutton(find_cd_frame,text="by regular expression",variable=self.find_cd_search_kind_var,value='regexp',command=self.find_mod)).grid(row=4, column=0, sticky='news',padx=4,pady=4) - (glob_radio_cd:=Radiobutton(find_cd_frame,text="by glob pattern",variable=self.find_cd_search_kind_var,value='glob',command=self.find_mod)).grid(row=5, column=0, sticky='news',padx=4,pady=4) - (fuzzy_radio_cd:=Radiobutton(find_cd_frame,text="by fuzzy match",variable=self.find_cd_search_kind_var,value='fuzzy',command=self.find_mod)).grid(row=6, column=0, sticky='news',padx=4,pady=4) + r_dont2 = Radiobutton(find_cd_frame,text="Don't use this criterion",variable=self.find_cd_search_kind_var,value='dont',command=self.find_mod,width=30) + r_dont2.grid(row=0, column=0, sticky='news',padx=4,pady=4) + r_dont2.bind('', lambda event : 
self.find_items()) + + r_without = Radiobutton(find_cd_frame,text="files without Custom Data ",variable=self.find_cd_search_kind_var,value='without',command=self.find_mod) + r_without.grid(row=1, column=0, sticky='news',padx=4,pady=4) + r_without.bind('', lambda event : self.find_items()) + + r_correct = Radiobutton(find_cd_frame,text="files with any correct Custom Data ",variable=self.find_cd_search_kind_var,value='any',command=self.find_mod) + r_correct.grid(row=2, column=0, sticky='news',padx=4,pady=4) + r_correct.bind('', lambda event : self.find_items()) + + r_error = Radiobutton(find_cd_frame,text="files with error on CD extraction",variable=self.find_cd_search_kind_var,value='error',command=self.find_mod) + r_error.grid(row=3, column=0, sticky='news',padx=4,pady=4) + r_error.bind('', lambda event : self.find_items()) + + regexp_radio_cd = Radiobutton(find_cd_frame,text="by regular expression",variable=self.find_cd_search_kind_var,value='regexp',command=self.find_mod) + regexp_radio_cd.grid(row=4, column=0, sticky='news',padx=4,pady=4) + regexp_radio_cd.bind('', lambda event : self.find_items()) + + glob_radio_cd = Radiobutton(find_cd_frame,text="by glob pattern",variable=self.find_cd_search_kind_var,value='glob',command=self.find_mod) + glob_radio_cd.grid(row=5, column=0, sticky='news',padx=4,pady=4) + glob_radio_cd.bind('', lambda event : self.find_items()) + + fuzzy_radio_cd = Radiobutton(find_cd_frame,text="by fuzzy match",variable=self.find_cd_search_kind_var,value='fuzzy',command=self.find_mod) + fuzzy_radio_cd.grid(row=6, column=0, sticky='news',padx=4,pady=4) + fuzzy_radio_cd.bind('', lambda event : self.find_items()) self.find_cd_regexp_entry = Entry(find_cd_frame,textvariable=self.find_cd_regexp_var,validate="key") self.find_cd_glob_entry = Entry(find_cd_frame,textvariable=self.find_cd_glob_var,validate="key") @@ -3922,25 +3958,27 @@ def scan(self,compression_level,threads,group=None): return True def remove_record(self): - label = librer_core.get_record_name(self.current_record) - path = self.current_record.header.scan_path - creation_time = self.current_record.header.creation_time + record = self.current_record + label = librer_core.get_record_name(record) + path = record.header.scan_path + creation_time = record.header.creation_time dialog = self.get_simple_question_dialog() - dialog.show('Delete selected data record ?',librer_core.record_info_alias_wrapper(self.current_record,self.current_record.txtinfo_short) ) + dialog.show('Delete selected data record ?',librer_core.record_info_alias_wrapper(record,record.txtinfo_short) ) if dialog.res_bool: - record_item = self.record_to_item[self.current_record] + record_item = self.record_to_item[record] self.tree.delete(record_item) - del self.record_to_item[self.current_record] + del self.record_to_item[record] del self.item_to_record[record_item] - res=librer_core.delete_record(self.current_record) + res=librer_core.delete_record(record) l_info(f'deleted file:{res}') self.find_clear() + #record.find_results_clean() self.status_record_configure('') if remaining_records := self.tree.get_children(): @@ -4285,6 +4323,7 @@ def open_item(self,item=None): if tree.tag_has(self.RECORD_RAW,item): self.access_filestructure(record) + self_item_to_data[item] = record.filestructure self.tree.item(item,tags=self.RECORD, image=self.ico_record_cd if record.has_cd() else self.ico_record) self.tree_select() #tylko dla aktualizacja ikony @@ -4528,6 +4567,15 @@ def record_info(self): time_info = strftime('%Y/%m/%d 
%H:%M:%S',localtime_catched(self.current_record.header.creation_time)) self.get_text_info_dialog().show('Record Info.',librer_core.record_info_alias_wrapper(self.current_record,self.current_record.txtinfo) ) + def purify_items_cache(self): + self_item_to_data = self.item_to_data + self_tree_exists = self.tree.exists + for item in list(self_item_to_data): + if not self_tree_exists(item): + del self_item_to_data[item] + + #print('self_item_to_data:',len(self_item_to_data.keys()),asizeof(self_item_to_data)) + @block @logwrapper def unload_record(self,record=None): @@ -4540,12 +4588,19 @@ def unload_record(self,record=None): self_tree = self.tree self_tree.delete(*self_tree.get_children(record_item)) + + if record_item in self.item_to_data: + del self.item_to_data[record_item] + self.purify_items_cache() + + record.unload_filestructure() + record.unload_customdata() + #self.find_clear() + self_tree.insert(record_item,'end',text='dummy') #dummy_sub_item self_tree.set(record_item,'opened','0') self_tree.item(record_item, open=False) - record.unload_filestructure() - record.unload_customdata() self_tree.item(record_item, image=self.get_record_raw_icon(record),tags=self.RECORD_RAW) self_tree.focus(record_item) self_tree.see(record_item) From 790490f8dc219a5b01aa9aa19fab14278f72f310 Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 24 Feb 2024 21:54:28 +0100 Subject: [PATCH 09/20] lighter icons, new record dialog fix --- src/icons/left.png | Bin 335 -> 192 bytes src/icons/right.png | Bin 318 -> 189 bytes src/icons/up.png | Bin 248 -> 203 bytes src/librer.py | 2 +- 4 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/icons/left.png b/src/icons/left.png index 5fc0f84ba1a480e42fff772b61181db46924ab5c..898b6e9b920f0d24b9400fb7b85a29081e69eb3f 100644 GIT binary patch delta 165 zcmV;W09yag0>A-~BYyygNklJ!OoRxZaI1xnxM$#ac)(Lq^WVC} zD+4xv`!_x+6eFzh81bM6yV(?m*knz%*=sDB>~O993(=&n86Irxq{CvWNio6~IHnf) Tess)400000NkvXXu0mjf$fHW! delta 309 zcmV-50m}Zs0nY-EBYy!FNkl8m%fdjDMv1pZLJY)bFc}PDv`E-Z zK9<;QMsG1mSd0d=>a7xsqOIj_(pJuCpXr?YJpccbT-S3llGgz8SV9(8_&~FJQxaUj z2KKOm8@!?3zA>8sk6|6Vm<#Z;8%{cTJirUw;{dA|My4Nl2Y=@%;RtIO=?5-g8)qov z7@NrTf-|vc3nzHNX}sqXa_U%vSjJ(5b%%XSrnL00g*s|z#B3%m^ATUg-Pax_h#rQ45=ir#4cCljMG$+zJ=x=A{QKUMXosHW^%_qFtH#HEI#Mi zw^&XPc;OlQxbO3Zb*zPQB*e{+``{S2sm(3w*ygcSt|c9i8N5EX+WVqq&-3WALy z_;@~mwb&?j!AmQGcPzx9*d#GFJB=hc_{J0Fu@sQ|IBVtsppDnKwHBZ%9)IGjl?M<-FL(<`6&2j! 
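purify_items_cache above removes item_to_data entries whose Treeview item no longer exists, so unloading a record also frees its cached tuples instead of leaking them. The same idea as a small reusable helper, with a plain set standing in for Treeview.exists:

def purge_stale_entries(cache, still_exists):
    # delete cache entries whose key fails the still_exists predicate; return how many were removed
    stale = [key for key in list(cache) if not still_exists(key)]
    for key in stale:
        del cache[key]
    return len(stale)

# usage sketch
cache = {'I001': ('dir', 0), 'I002': ('file', 7)}
live_items = {'I001'}
print(purge_stale_entries(cache, live_items.__contains__))   # 1
print(cache)                                                  # {'I001': ('dir', 0)}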
zxeLj18rq7dzhfl;72F1-oh3}A8IT{WVGc7mL!+2D+BQC8FJcr!sks=7EYxEnF$KKBGKC}tAMwB)n=vFY_|yWwal>|O z=QHY_J2iHFN@8$N1: + if row>2: self.up_button[e].grid(row=row,column=0,sticky='news') self.use_checkbutton[e] = Checkbutton(cde_frame,variable=self.CDE_use_var_list[e],command = lambda x=e : self.use_checkbutton_mod(x)) From b6d32a2db6bd69a90efe2eaaed2cf635db8f0ae0 Mon Sep 17 00:00:00 2001 From: piotrj Date: Sun, 25 Feb 2024 16:29:49 +0100 Subject: [PATCH 10/20] option for disable tooltips, selection fix --- src/librer.py | 153 +++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 128 insertions(+), 25 deletions(-) diff --git a/src/librer.py b/src/librer.py index b5214e4..1ca93bc 100644 --- a/src/librer.py +++ b/src/librer.py @@ -103,6 +103,8 @@ CFG_last_dir = 'last_dir' CFG_geometry = 'geometry' +CFG_KEY_show_popups = 'show_popups' +CFG_KEY_groups_collapse = 'groups_collapse' cfg_defaults={ CFG_KEY_SINGLE_DEVICE:True, @@ -138,7 +140,9 @@ CFG_KEY_import_crc:True, CFG_last_dir:'.', - CFG_geometry:'' + CFG_geometry:'', + CFG_KEY_show_popups:True, + CFG_KEY_groups_collapse:True } HOMEPAGE='https://github.com/PJDude/librer' @@ -487,9 +491,9 @@ def __init__(self,cwd): bg_focus='#90DD90' bg_focus_off='#90AA90' - bg_sel='#AAAAAA' + #bg_sel='#AAAAAA' - style_map('Treeview', background=[('focus',bg_focus),('selected',bg_sel),('','white')]) + style_map('Treeview', background=[('focus',bg_focus),('selected',bg_focus_off),('','white')]) #style_map('semi_focus.Treeview', background=[('focus',bg_focus),('selected',bg_focus_off),('','white')]) @@ -627,6 +631,8 @@ def file_cascade_post(): self_file_cascade_add_separator() self_file_cascade_add_command(label = 'Find ...',command = self.finder_wrapper_show, accelerator="Ctrl+F",image = self.ico_find,compound='left',state = 'normal' if librer_core.records else 'disabled') self_file_cascade_add_separator() + self_file_cascade_add_command(label = 'Settings ...',command = self.settings_show, accelerator="F4",image = self.ico_empty,compound='left',state = 'normal') + self_file_cascade_add_separator() self_file_cascade_add_command(label = 'Clear Search Results',command = self.find_clear, image = self.ico_empty,compound='left',state = 'normal' if self.any_valid_find_results else 'disabled') self_file_cascade_add_separator() self_file_cascade_add_command(label = 'Exit',command = self.exit,image = self_ico['exit'],compound='left') @@ -806,10 +812,10 @@ def help_cascade_post(): self_progress_dialog_on_load.hide(True) read_thread.join() - if self.action_abort: self.info_dialog_on_main.show('Records loading aborted','Restart Librer to gain full access to the recordset.') + if load_errors: self.get_text_info_dialog().show('Loading errors','\n\n'.join(load_errors) ) @@ -818,6 +824,10 @@ def help_cascade_post(): self.main_config(cursor='') self.tree_semi_focus() + + #for child in self.tree.get_children(): + # self.tree.item(child, open=False) + self.status_info.configure(image='',text = 'Ready') tree_bind = tree.bind @@ -835,6 +845,9 @@ def help_cascade_post(): self_main_bind = self_main.bind + self.tree.bind("",lambda event : self.tree_focus_out() ) + self.tree.bind("",lambda event : self.tree_focus_in() ) + self_main_bind("",lambda event : self.menubar_unpost() ) self_main_bind("",lambda event : self.focusin() ) @@ -866,6 +879,8 @@ def help_cascade_post(): self_main_bind('', lambda event : self.record_repack() ) self_main_bind('', lambda event : self.find_next() ) + self_main_bind('', lambda event : self.settings_show() ) + self_main_bind('', lambda event : 
self.find_prev()) self_main_bind('', lambda event : self.alias_name() ) @@ -885,6 +900,29 @@ def help_cascade_post(): self_main.mainloop() + def tree_focus_out(self): + tree = self.tree + item=tree.focus() + if item: + tree.selection_set(item) + self.selected=item + + selected=None + + def tree_focus_in(self): + try: + if selection := self.tree.selection(): + item=selection[0] + tree.selection_remove(item) + tree.focus(item) + self.tree_sel_change(item,True) + elif item:=self.selected: + tree.focus(item) + self.tree_sel_change(item,True) + + except Exception as e: + l_error(f'groups_tree_focus_in:{e}') + def tree_scrollbar_set(self,v1,v2): if v1=='0.0' and v2=='1.0': self.tree_scrollbar.pack_forget() @@ -1582,6 +1620,45 @@ def get_assign_to_group_dialog(self): self.assign_to_group_dialog_created = True return self.assign_to_group_dialog + settings_dialog_created = False + @restore_status_line + @block + def get_settings_dialog(self): + if not self.settings_dialog_created: + self.status("Creating dialog ...") + + self.settings_dialog=GenericDialog(self.main,self.main_icon_tuple,self.bg_color,'Settings',pre_show=self.pre_show,post_close=self.post_close) + + sfdma = self.settings_dialog.area_main + + self.show_popups_var = BooleanVar() + self.popups_cb = Checkbutton(sfdma,text='Show tooltips',variable=self.show_popups_var,command=self.popups_show_mod) + self.popups_cb.grid(row=0, column=0, sticky='news',padx=4,pady=4) + + self.groups_collapsed_var = BooleanVar() + self.popups_cb = Checkbutton(sfdma,text='Groups collapsed at startup',variable=self.groups_collapsed_var,command=self.groups_collapse_mod) + self.popups_cb.grid(row=1, column=0, sticky='news',padx=4,pady=4) + + sfdma.grid_columnconfigure( 0, weight=1) + + Button(self.settings_dialog.area_buttons, text='Close', width=14, command=self.settings_close ).pack(side='right', anchor='n',padx=5,pady=5) + + self.settings_dialog_created = True + + self.show_popups_var.set(self.cfg.get(CFG_KEY_show_popups)) + self.groups_collapsed_var.set(self.cfg.get(CFG_KEY_groups_collapse)) + + return self.settings_dialog + + def popups_show_mod(self): + self.cfg.set(CFG_KEY_show_popups,self.show_popups_var.get()) + + def groups_collapse_mod(self): + self.cfg.set(CFG_KEY_groups_collapse,self.groups_collapsed_var.get()) + + def settings_close(self): + self.settings_dialog.hide() + find_dialog_created = False @restore_status_line @block @@ -1663,7 +1740,6 @@ def ver_number(var): self.find_range_cb1.grid(row=0, column=0, sticky='news',padx=4,pady=4) self.find_range_cb1.bind('', lambda event : self.find_items()) - find_range_cb2 = Radiobutton(find_filename_frame,text='All records',variable=self.find_range_all,value=True,command=self.find_mod) find_range_cb2.grid(row=0, column=1, sticky='news',padx=4,pady=4) find_range_cb2.bind('', lambda event : self.find_items()) @@ -2211,7 +2287,7 @@ def show_tooltip_widget(self,event): self.configure_tooltip(event.widget) - self.tooltip_deiconify() + self.tooltip_deiconify_wrapp() self.adaptive_tooltip_geometry(event) @@ -2239,6 +2315,10 @@ def get_item_record(self,item): subpath_list.reverse() return (item,current_record_name,subpath_list) + def tooltip_deiconify_wrapp(self): + if self.cfg.get(CFG_KEY_show_popups): + self.tooltip_deiconify() + def show_tooltips_tree(self,event): self.unschedule_tooltips_tree(event) self.menubar_unpost() @@ -2250,7 +2330,7 @@ def show_tooltips_tree(self,event): if tree.identify("region", event.x, event.y) == 'heading': if colname in ('path','size_h','ctime_h'): 
self.tooltip_lab_configure(text='Sort by %s' % self.org_label[colname]) - self.tooltip_deiconify() + self.tooltip_deiconify_wrapp() else: self.hide_tooltip() @@ -2292,7 +2372,7 @@ def show_tooltips_tree(self,event): self.tooltip_lab_configure(text='\n'.join(tooltip_list)) - self.tooltip_deiconify() + self.tooltip_deiconify_wrapp() else: self.tooltip_lab_configure(text='label') @@ -2301,7 +2381,7 @@ def show_tooltips_tree(self,event): if coldata: self.tooltip_lab_configure(text=coldata) - self.tooltip_deiconify() + self.tooltip_deiconify_wrapp() else: self.hide_tooltip() @@ -2397,6 +2477,13 @@ def exit(self): find_params_changed=True + @block + def settings_show(self): + dialog = self.get_settings_dialog() + dialog.show('Settings') + + self.tree_semi_focus() + @block def finder_wrapper_show(self): #if self.current_record: @@ -2708,8 +2795,8 @@ def find_items(self): sel_range = librer_core.records else: sel_range = self.get_selected_records() - sel_range_info = ','.join([librer_core.get_record_name(rec) for rec in sel_range]) - search_info_lines_append(f'Search in records:{sel_range_info}') + sel_range_info = '\n'.join([librer_core.get_record_name(rec) for rec in sel_range]) + search_info_lines_append(f'Search in records:\n{sel_range_info}') #if self.current_record: # search_info_lines_append(f'Search in record:{librer_core.get_record_name(self.current_record)}') @@ -3092,7 +3179,7 @@ def select_find_result(self,mod): self.tree.see(current_item) self.tree.update() - self_tree.selection_set(current_item) + #self_tree.selection_set(current_item) self_tree.focus(current_item) @@ -3216,8 +3303,8 @@ def key_press(self,event): l_error(e) self.info_dialog_on_main.show('INTERNAL ERROR',str(e)) - if tree_focus:=tree.focus(): - tree.selection_set(tree_focus) + #if tree_focus:=tree.focus(): + # tree.selection_set(tree_focus) ################################################# def select_and_focus(self,item): @@ -3250,7 +3337,7 @@ def tree_on_mouse_button_press(self,event): tree.selection_remove(tree.selection()) tree.focus(item) - tree.selection_set(item) + #tree.selection_set(item) self.tree_semi_focus() self.tree_sel_change(item) @@ -3281,7 +3368,7 @@ def tree_semi_focus(self): #tree.focus(item) tree.see(item) - tree.selection_set(item) + #tree.selection_set(item) self.tree_sel_change(item) self.sel_item = item @@ -3447,6 +3534,7 @@ def remove_from_group(self): self.column_sort(self.tree) + last_assign_to_group_group = None @logwrapper def assign_to_group(self): #item=self.tree.focus() @@ -3465,7 +3553,10 @@ def assign_to_group(self): current = librer_core.get_record_group(record) if not current: - current = values[0] + if self.last_assign_to_group_group in values: + current = self.last_assign_to_group_group + else: + current = values[0] dial.show('Assign to group','Assign record to group:',current) @@ -3473,6 +3564,7 @@ def assign_to_group(self): group = dial.entry_val.get() if group: + self.last_assign_to_group_group = group res2=librer_core.assign_new_group(record,group) if res2: self.info_dialog_on_main.show('assign_new_group Error',res2) @@ -3983,7 +4075,7 @@ def remove_record(self): self.status_record_configure('') if remaining_records := self.tree.get_children(): if new_sel_record := remaining_records[0]: - self.tree.selection_set(new_sel_record) + #self.tree.selection_set(new_sel_record) self.tree.focus(new_sel_record) self.tree_semi_focus() @@ -4415,9 +4507,10 @@ def groups_show(self): def single_group_show(self,group): values = (group,group,0,'',0,'',0,'',self.GROUP) + 
group_item=self.tree.insert('','end',iid=None,values=values,open=False,text=group,image=self.ico_group,tags=self.GROUP) self.group_to_item[group] = group_item - self.tree.selection_set(group_item) + #self.tree.selection_set(group_item) self.tree.focus(group_item) self.tree.see(group_item) self.column_sort(self.tree) @@ -4436,14 +4529,25 @@ def single_record_show(self,record): record_item=self.tree.insert(group_item,'end',iid=None,values=values,open=False,text=librer_core.get_record_name(record),image=self.get_record_raw_icon(record),tags=self.RECORD_RAW) self.tree.insert(record_item,'end',text='dummy') #dummy_sub_item + groups_collapse = self.cfg.get(CFG_KEY_groups_collapse) + + #print(self.cfg.get(CFG_KEY_groups_collapse),group_item) + self.tree.item(group_item, open = False) + #self.cfg.get(CFG_KEY_groups_collapse) + self.tree_sort_item(None) self.item_to_record[record_item]=record self.record_to_item[record]=record_item - self.tree.focus(record_item) - self.tree.selection_set(record_item) - self.tree.see(record_item) + if groups_collapse: + self.tree.focus(group_item) + #self.tree.selection_set(group_item) + self.tree.see(group_item) + else: + self.tree.focus(record_item) + #self.tree.selection_set(record_item) + self.tree.see(record_item) records_len=len(librer_core.records) self.status_records_all_configure(f'Records:{records_len}') @@ -4464,7 +4568,6 @@ def single_record_show(self,record): self.column_sort(self.tree) - def tree_update_none(self): self.tree.selection_remove(self.tree.selection()) @@ -4565,7 +4668,7 @@ def record_info(self): if not self.block_processing_stack: if self.current_record: time_info = strftime('%Y/%m/%d %H:%M:%S',localtime_catched(self.current_record.header.creation_time)) - self.get_text_info_dialog().show('Record Info.',librer_core.record_info_alias_wrapper(self.current_record,self.current_record.txtinfo) ) + self.get_text_info_dialog().show('Record Info',librer_core.record_info_alias_wrapper(self.current_record,self.current_record.txtinfo) ) def purify_items_cache(self): self_item_to_data = self.item_to_data @@ -4604,7 +4707,7 @@ def unload_record(self,record=None): self_tree.item(record_item, image=self.get_record_raw_icon(record),tags=self.RECORD_RAW) self_tree.focus(record_item) self_tree.see(record_item) - self_tree.selection_set(record_item) + #self_tree.selection_set(record_item) self.tree_select() @block From 6e1bb47cba2313d9d08ee6e59511fefcbbafd468 Mon Sep 17 00:00:00 2001 From: piotrj Date: Sun, 25 Feb 2024 16:30:16 +0100 Subject: [PATCH 11/20] similiar distro names --- .github/workflows/run.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/run.yml b/.github/workflows/run.yml index ab00638..ffa2bf1 100644 --- a/.github/workflows/run.yml +++ b/.github/workflows/run.yml @@ -238,7 +238,7 @@ jobs: path: ./ - name: rename windows distro - run: mv ./librer.win.zip ./librer_devel.${{ steps.version.outputs.version }}.nuitka.portable.windows.zip + run: mv ./librer.win.zip ./librer_devel.${{ steps.version.outputs.version }}.portable.windows.nuitka.zip ##################################### - name: download linux nuitka distro @@ -248,7 +248,7 @@ jobs: path: ./ - name: rename linux distro - run: mv ./librer.lin.zip ./librer_devel.${{ steps.version.outputs.version }}.nuitka.portable.linux.zip + run: mv ./librer.lin.zip ./librer_devel.${{ steps.version.outputs.version }}.portable.linux.nuitka.zip ##################################### - name: Create GitHub release @@ -262,7 +262,7 @@ jobs: prerelease: true files: | 
librer_devel.${{ steps.version.outputs.version }}.portable.linux.zip + librer_devel.${{ steps.version.outputs.version }}.portable.linux.nuitka.zip librer_devel.${{ steps.version.outputs.version }}.portable.windows.zip - librer_devel.${{ steps.version.outputs.version }}.nuitka.portable.linux.zip - librer_devel.${{ steps.version.outputs.version }}.nuitka.portable.windows.zip + librer_devel.${{ steps.version.outputs.version }}.portable.windows.nuitka.zip From 9fcbc5f889e9125e6e9914ad5ac0487dae3ff3d8 Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 1 Mar 2024 21:37:02 +0100 Subject: [PATCH 12/20] nuitka update --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e09884f..a18bf75 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,4 +5,4 @@ psutil==5.9.8 pympler==1.0.1 ciso8601==2.3.1 pywin32==306; sys.platform == 'win32' -nuitka==2.0.3 +nuitka==2.0.5 From 6660215cc0fba8e7e6819cb863d8e9fbbe61c74a Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 1 Mar 2024 21:37:37 +0100 Subject: [PATCH 13/20] layout fix --- src/dialogs.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dialogs.py b/src/dialogs.py index 9f51111..26b7692 100644 --- a/src/dialogs.py +++ b/src/dialogs.py @@ -356,10 +356,10 @@ def __init__(self,parent,icon,bg_color,pre_show=None,post_close=None,min_width=1 self.area_main.grid_columnconfigure(0, weight=1) self.cancel_button=Button(self.area_buttons, text='Close', width=14, command=super().hide ) - self.cancel_button.pack(side='right', anchor='e',padx=2,pady=5) + self.cancel_button.pack(side='right', anchor='e',padx=2,pady=5,fill='both') self.copy_button=Button(self.area_buttons, text='Copy', width=14, command=self.clip_copy_message ) - self.copy_button.pack(side='right', anchor='w',padx=2,pady=5) + self.copy_button.pack(side='right', anchor='w',padx=2,pady=5,fill='both') self.find_next_butt=Button(self.area_mark, command=lambda : self.find_next_prev(1), width=1) self.find_next_butt.pack(side='right', anchor='w',padx=2,pady=5,fill='both') @@ -509,7 +509,7 @@ def __init__(self,parent,icon,bg_color,pre_show=None,post_close=None,min_width=8 self.cancel_button.pack(side='left', anchor='n',padx=5,pady=5) self.ok_button=Button(self.area_buttons, text='OK', width=14, command=self.ok,image=image, compound='left' ) - self.ok_button.pack(side='right', anchor='n',padx=5,pady=5) + self.ok_button.pack(side='right', anchor='n',padx=5,pady=5,fill='both') self.focus=self.cancel_button From fc10596cdaca9d65b0e350a96efaa1a928ef98c6 Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 1 Mar 2024 21:38:57 +0100 Subject: [PATCH 14/20] distinguish different types of CDE errors --- src/core.py | 53 +++++++++++++++++++++++------------------- src/librer.py | 64 +++++++++++++++++++++++++-------------------------- 2 files changed, 61 insertions(+), 56 deletions(-) diff --git a/src/core.py b/src/core.py index 6eb4248..3b78cc2 100644 --- a/src/core.py +++ b/src/core.py @@ -81,6 +81,8 @@ CD_OK_ID = 0 CD_INDEX_ID = 1 CD_DATA_ID = 2 +CD_ABORTED_ID = 3 +CD_EMPTY_ID = 4 def get_dev_labes_dict(): lsblk = subprocess_run(['lsblk','-fJ'],capture_output = True,text = True) @@ -637,6 +639,8 @@ def extract_customdata(self,print_func,abort_list,threads=0): CD_OK_ID_loc = CD_OK_ID CD_DATA_ID_loc = CD_DATA_ID + CD_ABORTED_ID_loc = CD_ABORTED_ID + CD_EMPTY_ID_loc = CD_EMPTY_ID all_threads_data_list={} all_threads_files_cde_errors_quant = {} @@ -668,6 +672,8 @@ def 
threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan self_killed[thread_index]=False + empty=False + time_start = perf_counter_loc() if abort_list[0] : #wszystko returncode=200 @@ -718,6 +724,7 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan if not output: output = 'No output collected.' returncode=203 + empty=True ##################################### @@ -736,7 +743,9 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan new_elem={ CD_OK_ID_loc:bool(returncode==0 and not self_killed[thread_index] and not aborted), - CD_DATA_ID_loc:(rule_nr,returncode,output) + CD_DATA_ID_loc:(rule_nr,returncode,output), + CD_ABORTED_ID_loc:aborted, + CD_EMPTY_ID_loc:empty } scan_like_list.append(new_elem) #dostep z wielu watkow @@ -820,7 +829,6 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan try: used_cd_index = customdata_helper[cd_field] new_elem[CD_INDEX_ID_loc]=used_cd_index - customdata_stats_refs[rule_nr]+=1 except: customdata_helper[cd_field] = new_elem[CD_INDEX_ID_loc] = cd_index cd_index+=1 @@ -829,7 +837,8 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan customdata_stats_size[rule_nr]+=asizeof(cd_field) customdata_stats_uniq[rule_nr]+=1 - customdata_stats_refs[rule_nr]+=1 + + customdata_stats_refs[rule_nr]+=1 print_func( ('info','Custom data post-processing finished.'),True) @@ -906,7 +915,8 @@ def tupelize_rec(self,scan_like_data,results_queue_put): except: has_cd = False cd_ok = False - has_crc = False + cd_aborted = False + cd_empty = False else: #if 'cd_ok' in info_dict: if CD_OK_ID in info_dict: @@ -917,14 +927,17 @@ def tupelize_rec(self,scan_like_data,results_queue_put): cd_ok = False has_cd = False - #if 'crc_val' in info_dict: - # crc_val = info_dict['crc_val'] - # has_crc = True - #else: - # has_crc = False - has_crc = False + if CD_ABORTED_ID in info_dict: + cd_aborted = info_dict[CD_ABORTED_ID] + else: + cd_aborted = False - code_new = LUT_encode_loc[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc,False,False) ] + if CD_EMPTY_ID in info_dict: + cd_empty = info_dict[CD_EMPTY_ID] + else: + cd_empty = False + + code_new = LUT_encode_loc[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,False) ] sub_list_elem=[entry_name_index,code_new,size,mtime] @@ -934,8 +947,6 @@ def tupelize_rec(self,scan_like_data,results_queue_put): if has_cd: #only files self.header.references_cd+=1 sub_list_elem.append( cd_index ) - if has_crc: #only files - sub_list_elem.append( crc_val ) sub_list.append( tuple(sub_list_elem) ) @@ -956,12 +967,11 @@ def pack_data(self,results_queue_put): has_cd = False has_files = True cd_ok = False - has_crc = False self.header.references_names=0 self.header.references_cd=0 - code = LUT_encode[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc,False,False) ] + code = LUT_encode[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,False,False,False) ] self.filestructure = ('',code,size,mtime,self.tupelize_rec(self.scan_data,results_queue_put)) self.header.items_names=len(self.filenames) @@ -975,12 +985,12 @@ def remove_cd_rec(self,tuple_like_data): self_remove_cd_rec = self.remove_cd_rec - is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc,aux1,aux2 = LUT_decode_loc[tuple_like_data[1]] + is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,aux2 = LUT_decode_loc[tuple_like_data[1]] has_cd=False cd_ok=False - code = 
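With the two new constants, every file's custom-data entry now records four independent facts alongside the payload: extraction succeeded, it was aborted or killed, and whether it produced any output at all. A reduced sketch of assembling such an entry from one extractor run (the real code also tracks per-rule statistics and later replaces the payload with an index into the deduplicated custom-data pool):

CD_OK_ID, CD_INDEX_ID, CD_DATA_ID, CD_ABORTED_ID, CD_EMPTY_ID = 0, 1, 2, 3, 4

def make_cd_entry(rule_nr, returncode, output, killed=False, aborted=False):
    # build the per-file custom-data dict recorded after running one extractor
    empty = not output
    return {
        CD_OK_ID: returncode == 0 and not killed and not aborted,
        CD_DATA_ID: (rule_nr, returncode, output),
        CD_ABORTED_ID: aborted or killed,
        CD_EMPTY_ID: empty,
    }

print(make_cd_entry(0, 0, 'track 1\ntrack 2'))                                        # ok entry
print(make_cd_entry(1, 200, 'Custom data extraction was aborted.', aborted=True))     # aborted entry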
LUT_encode[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc,aux1,aux2) ] + code = LUT_encode[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,aux2) ] new_list = [tuple_like_data[0],code,tuple_like_data[2],tuple_like_data[3]] @@ -1058,7 +1068,7 @@ def find_items(self, name = filenames_loc[name_nr] - is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc,aux1,aux2 = LUT_decode_loc[code] + is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,aux2 = LUT_decode_loc[code] elem_index=4 if has_files: @@ -1071,9 +1081,6 @@ def find_items(self, cd_nr = data_entry[elem_index] elem_index+=1 - #if has_crc: - # crc = data_entry[elem_index] - next_level = parent_path_components + [name] if name_search_kind_is_error: if size>-1: @@ -2004,8 +2011,6 @@ def import_records_wii_do(self,compr,postfix,label,quant_files,quant_folders,fil has_cd = bool(new_record.customdata) has_files = True cd_ok = False - has_crc = False - new_record.header.references_names=0 new_record.header.references_cd=0 @@ -2013,7 +2018,7 @@ def import_records_wii_do(self,compr,postfix,label,quant_files,quant_folders,fil sub_size,sub_quant,sub_folders_quant = new_record.sld_recalc_rec(scan_like_data) #print('ccc',sub_size,sub_quant,sub_folders_quant,flush=True) - code = LUT_encode[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc,False,False) ] + code = LUT_encode[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,False,False,False) ] new_record.header.sum_size = sub_size new_record.header.quant_files = sub_quant diff --git a/src/librer.py b/src/librer.py index 1ca93bc..981f066 100644 --- a/src/librer.py +++ b/src/librer.py @@ -374,6 +374,10 @@ def __init__(self,cwd): self.ico_cd_ok_crc = self_ico['cd_ok_crc'] self.ico_cd_error = self_ico['cd_error'] self.ico_cd_error_crc = self_ico['cd_error_crc'] + + self.ico_cd_aborted = self_ico['cd_aborted'] + self.ico_cd_empty = self_ico['cd_empty'] + self.ico_crc = self_ico['crc'] self.ico_license = self_ico['license'] self.ico_timeout = self_ico['timeout'] @@ -631,7 +635,7 @@ def file_cascade_post(): self_file_cascade_add_separator() self_file_cascade_add_command(label = 'Find ...',command = self.finder_wrapper_show, accelerator="Ctrl+F",image = self.ico_find,compound='left',state = 'normal' if librer_core.records else 'disabled') self_file_cascade_add_separator() - self_file_cascade_add_command(label = 'Settings ...',command = self.settings_show, accelerator="F4",image = self.ico_empty,compound='left',state = 'normal') + self_file_cascade_add_command(label = 'Settings ...',command = self.settings_show, accelerator="F12",image = self.ico_empty,compound='left',state = 'normal') self_file_cascade_add_separator() self_file_cascade_add_command(label = 'Clear Search Results',command = self.find_clear, image = self.ico_empty,compound='left',state = 'normal' if self.any_valid_find_results else 'disabled') self_file_cascade_add_separator() @@ -879,7 +883,7 @@ def help_cascade_post(): self_main_bind('', lambda event : self.record_repack() ) self_main_bind('', lambda event : self.find_next() ) - self_main_bind('', lambda event : self.settings_show() ) + self_main_bind('', lambda event : self.settings_show() ) self_main_bind('', lambda event : self.find_prev()) @@ -910,15 +914,16 @@ def tree_focus_out(self): selected=None def tree_focus_in(self): + tree = self.tree try: - if selection := self.tree.selection(): + if selection := tree.selection(): + tree.selection_remove(*selection) item=selection[0] - 
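The tupelize/remove_cd/find hunks keep converting between a ten-element tuple of booleans (is_dir ... cd_aborted, cd_empty, aux) and a single integer code via LUT_encode/LUT_decode, so each packed entry stores one small int instead of ten flags. The tables themselves are defined earlier in core.py; one possible self-contained construction that shows the round trip they provide:

from itertools import product

FLAG_NAMES = ('is_dir', 'is_file', 'is_symlink', 'is_bind', 'has_cd',
              'has_files', 'cd_ok', 'cd_aborted', 'cd_empty', 'aux2')

# illustrative construction: enumerate every combination of the ten flags once
LUT_decode = dict(enumerate(product((False, True), repeat=len(FLAG_NAMES))))
LUT_encode = {flags: code for code, flags in LUT_decode.items()}

flags = (True, False, False, False, False, True, False, False, False, False)   # a directory with children
code = LUT_encode[flags]
assert LUT_decode[code] == flags
print(code, dict(zip(FLAG_NAMES, LUT_decode[code])))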
tree.selection_remove(item) tree.focus(item) - self.tree_sel_change(item,True) + tree_sel_change(item,True) elif item:=self.selected: tree.focus(item) - self.tree_sel_change(item,True) + tree_sel_change(item,True) except Exception as e: l_error(f'groups_tree_focus_in:{e}') @@ -936,7 +941,7 @@ def item_has_cd(self,item): data_tuple = self.item_to_data[item] code = data_tuple[1] - is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,aux0,aux1,aux2 = LUT_decode[code] + is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,aux2 = LUT_decode[code] return has_cd def widget_tooltip_cget(self,widget,tooltip): @@ -2356,7 +2361,7 @@ def show_tooltips_tree(self,event): if item in self.item_to_data: data_tuple = self.item_to_data[item] code = data_tuple[1] - is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,aux0,aux1,aux2 = LUT_decode[code] + is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,aux2 = LUT_decode[code] if is_symlink: tooltip_list.append('') @@ -2745,13 +2750,21 @@ def find_mod(self): def invalidate_find_results(self): self.any_valid_find_results=Fale + def get_range_name(self): + if self.current_group: + return f'group: {self.current_group}' + elif self.current_record: + return f'record: {self.current_record}' + else: + return () + def get_selected_records(self): if self.current_group: return librer_core.get_records_of_group(self.current_group) elif self.current_record: return [self.current_record] else: - return [] + return () #@restore_status_line def find_items(self): @@ -2795,7 +2808,8 @@ def find_items(self): sel_range = librer_core.records else: sel_range = self.get_selected_records() - sel_range_info = '\n'.join([librer_core.get_record_name(rec) for rec in sel_range]) + #sel_range_info = '\n'.join([librer_core.get_record_name(rec) for rec in sel_range]) + sel_range_info = self.get_range_name() search_info_lines_append(f'Search in records:\n{sel_range_info}') #if self.current_record: @@ -3179,8 +3193,6 @@ def select_find_result(self,mod): self.tree.see(current_item) self.tree.update() - #self_tree.selection_set(current_item) - self_tree.focus(current_item) self.tree_semi_focus() @@ -3303,9 +3315,6 @@ def key_press(self,event): l_error(e) self.info_dialog_on_main.show('INTERNAL ERROR',str(e)) - #if tree_focus:=tree.focus(): - # tree.selection_set(tree_focus) - ################################################# def select_and_focus(self,item): self.tree_see(item) @@ -3337,7 +3346,6 @@ def tree_on_mouse_button_press(self,event): tree.selection_remove(tree.selection()) tree.focus(item) - #tree.selection_set(item) self.tree_semi_focus() self.tree_sel_change(item) @@ -3364,11 +3372,7 @@ def tree_semi_focus(self): if item: tree.focus_set() - #tree.configure(style='semi_focus.Treeview') - - #tree.focus(item) tree.see(item) - #tree.selection_set(item) self.tree_sel_change(item) self.sel_item = item @@ -4075,7 +4079,6 @@ def remove_record(self): self.status_record_configure('') if remaining_records := self.tree.get_children(): if new_sel_record := remaining_records[0]: - #self.tree.selection_set(new_sel_record) self.tree.focus(new_sel_record) self.tree_semi_focus() @@ -4424,7 +4427,7 @@ def open_item(self,item=None): (top_entry_name_nr,top_code,top_size,top_mtime) = top_data_tuple[0:4] - top_is_dir,top_is_file,top_is_symlink,top_is_bind,top_has_cd,top_has_files,top_cd_ok,top_aux0,top_aux1,top_aux2 = LUT_decode_loc[top_code] + 
top_is_dir,top_is_file,top_is_symlink,top_is_bind,top_has_cd,top_has_files,top_cd_ok,top_cd_aborted,top_cd_empty,top_aux2 = LUT_decode_loc[top_code] record_filenames = record.filenames @@ -4433,6 +4436,10 @@ def open_item(self,item=None): self_ico_folder = self.ico_folder self_ico_cd_ok = self.ico_cd_ok self_ico_cd_error = self.ico_cd_error + + self_cd_ico_aborted = self.ico_cd_aborted + self_cd_ico_empty = self.ico_cd_empty + self_ico_empty = self.ico_empty record_find_results = record.find_results @@ -4448,7 +4455,7 @@ def open_item(self,item=None): entry_subpath_tuple = tuple(subpath_list + [entry_name]) - is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,aux0,aux1,aux2 = LUT_decode_loc[code] + is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,aux2 = LUT_decode_loc[code] sub_data_tuple = None @@ -4463,12 +4470,9 @@ def open_item(self,item=None): if has_cd: elem_index+=1 - #if has_crc: - # pass kind = self_DIR if is_dir else self_FILE - #image = (self_ico_folder_error if size==-1 else self_ico_folder_link if is_symlink or is_bind else self_ico_folder) if is_dir else (self_ico_cd_ok if cd_ok else self_ico_cd_error) if has_cd and not has_crc else (self_ico_cd_ok_crc if cd_ok else self_ico_cd_error_crc) if has_cd and has_crc else self.ico_crc if has_crc else self_ico_empty - image = (self_ico_folder_error if size==-1 else self_ico_folder_link if is_symlink or is_bind else self_ico_folder) if is_dir else (self_ico_cd_ok if cd_ok else self_ico_cd_error) if has_cd else self_ico_empty + image = (self_ico_folder_error if size==-1 else self_ico_folder_link if is_symlink or is_bind else self_ico_folder) if is_dir else (self_ico_cd_ok if cd_ok else self_cd_ico_aborted if cd_aborted else self_cd_ico_empty if cd_empty else self_ico_cd_error) if has_cd else self_ico_empty if is_symlink or is_bind: tags=self_SYMLINK @@ -4510,7 +4514,6 @@ def single_group_show(self,group): group_item=self.tree.insert('','end',iid=None,values=values,open=False,text=group,image=self.ico_group,tags=self.GROUP) self.group_to_item[group] = group_item - #self.tree.selection_set(group_item) self.tree.focus(group_item) self.tree.see(group_item) self.column_sort(self.tree) @@ -4542,11 +4545,9 @@ def single_record_show(self,record): if groups_collapse: self.tree.focus(group_item) - #self.tree.selection_set(group_item) self.tree.see(group_item) else: self.tree.focus(record_item) - #self.tree.selection_set(record_item) self.tree.see(record_item) records_len=len(librer_core.records) @@ -4632,7 +4633,7 @@ def show_customdata(self): data_tuple = self.item_to_data[item] (entry_name,code,size,mtime) = data_tuple[0:4] - is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,aux0,aux1,aux2 = LUT_decode[code] + is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,cd_aborted,cd_empty,aux2 = LUT_decode[code] if has_cd: #wiec nie has_files cd_index = data_tuple[4] @@ -4707,7 +4708,6 @@ def unload_record(self,record=None): self_tree.item(record_item, image=self.get_record_raw_icon(record),tags=self.RECORD_RAW) self_tree.focus(record_item) self_tree.see(record_item) - #self_tree.selection_set(record_item) self.tree_select() @block From f2e235852b9bd0b68effffadcbde4f4b80a3dbdc Mon Sep 17 00:00:00 2001 From: piotrj Date: Fri, 1 Mar 2024 21:40:06 +0100 Subject: [PATCH 15/20] new errors kinds icons --- src/icons/cd_aborted.png | Bin 0 -> 83 bytes src/icons/cd_empty.png | Bin 0 -> 114 bytes 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 src/icons/cd_aborted.png create mode 100644 
src/icons/cd_empty.png diff --git a/src/icons/cd_aborted.png b/src/icons/cd_aborted.png new file mode 100644 index 0000000000000000000000000000000000000000..8a0332d2a7e921103ed6392c14b73d36ef72fc0c GIT binary patch literal 83 zcmeAS@N?(olHy`uVBq!ia0vp^0wB!61|;P_|4#%`Ql2i3Ar_~T6C_v{Cp>BV&%-nS g!qFLS&8>_KN Date: Sat, 2 Mar 2024 11:45:50 +0100 Subject: [PATCH 16/20] remember columns sorting, search records by different kind of errors --- src/core.py | 27 ++++++++++++++++++++++----- src/librer.py | 45 +++++++++++++++++++++++++++++++-------------- src/record.py | 14 ++++++-------- 3 files changed, 59 insertions(+), 27 deletions(-) diff --git a/src/core.py b/src/core.py index 3b78cc2..9bf1342 100644 --- a/src/core.py +++ b/src/core.py @@ -672,17 +672,13 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan self_killed[thread_index]=False - empty=False - time_start = perf_counter_loc() if abort_list[0] : #wszystko returncode=200 output = aborted_string aborted = True + empty=True else: - aborted = False - - returncode=202 expressions,use_smin,smin_int,use_smax,smax_int,executable,parameters,shell,timeout,do_crc = cde_list[rule_nr] full_file_path = normpath(abspath(sep.join([scan_path,subpath]))).replace('/',sep) command,command_info = get_command(executable,parameters,full_file_path,shell) @@ -693,6 +689,7 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan timeout_val=time()+timeout if timeout else None ##################################### + empty=False try: subprocess = uni_popen(command,shell) timeout_semi_list[0]=(timeout_val,subprocess) @@ -700,6 +697,7 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan timeout_semi_list[0]=None returncode=201 output = f'Exception: {re}' + aborted = False else: subprocess_stdout_readline = subprocess.stdout.readline subprocess_poll = subprocess.poll @@ -707,6 +705,7 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan output_list = [] output_list_append = output_list.append + returncode=202 while True: line = subprocess_stdout_readline() @@ -719,6 +718,9 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan if self_killed[thread_index]: output_list_append('Killed.') + aborted = True + else: + aborted = False output = '\n'.join(output_list).strip() if not output: @@ -734,6 +736,7 @@ def threaded_cde(timeout_semi_list,thread_index,thread_data_list,cde_errors_quan cde_errors_quant[rule_nr]+=1 cde_errors_quant_all+=1 + if not aborted: files_cde_quant += 1 files_cde_size += size @@ -1042,6 +1045,8 @@ def find_items(self, cd_search_kind_is_any = bool(cd_search_kind=='any') cd_search_kind_is_without = bool(cd_search_kind=='without') cd_search_kind_is_error = bool(cd_search_kind=='error') + cd_search_kind_is_empty = bool(cd_search_kind=='empty') + cd_search_kind_is_aborted = bool(cd_search_kind=='aborted') name_search_kind_is_error = bool(name_search_kind=='error') @@ -1175,6 +1180,18 @@ def find_items(self, continue else: continue + elif cd_search_kind_is_empty: + if has_cd: + if not cd_empty: + continue + else: + continue + elif cd_search_kind_is_aborted: + if has_cd: + if not cd_aborted: + continue + else: + continue print_func( (search_progress,size,mtime,*next_level) ) diff --git a/src/librer.py b/src/librer.py index 981f066..0feb63a 100644 --- a/src/librer.py +++ b/src/librer.py @@ -103,6 +103,7 @@ CFG_last_dir = 'last_dir' CFG_geometry = 'geometry' +CFG_SORTING = 'sorting' CFG_KEY_show_popups = 
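find_items above gains 'empty' and 'aborted' as custom-data search kinds next to the existing 'without', 'any' and 'error'. The five non-textual kinds boil down to a predicate over the per-entry flags; collapsed into one function for clarity (the real loop expresses this with continue guards, and the regexp/glob/fuzzy kinds are handled separately):

def cd_kind_matches(kind, has_cd, cd_ok, cd_aborted, cd_empty):
    # True when an entry satisfies the selected custom-data search kind (simplified)
    if kind == 'without':
        return not has_cd
    if kind == 'any':
        return has_cd and cd_ok
    if kind == 'error':
        return has_cd and not cd_ok
    if kind == 'empty':
        return has_cd and cd_empty
    if kind == 'aborted':
        return has_cd and cd_aborted
    return True   # 'dont' and the textual kinds do not filter on these flags

print(cd_kind_matches('aborted', has_cd=True, cd_ok=False, cd_aborted=True, cd_empty=True))      # True
print(cd_kind_matches('empty',   has_cd=False, cd_ok=False, cd_aborted=False, cd_empty=False))   # False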
'show_popups' CFG_KEY_groups_collapse = 'groups_collapse' @@ -684,8 +685,11 @@ def help_cascade_post(): self_REAL_SORT_COLUMN_IS_NUMERIC['size_h'] = True self_REAL_SORT_COLUMN_IS_NUMERIC['ctime_h'] = True - #colname,sort_index,is_numeric,reverse,dir_code,non_dir_code - self.column_sort_last_params=self.column_groups_sort_params_default=('#0',self_REAL_SORT_COLUMN_INDEX['#0'],self_REAL_SORT_COLUMN_IS_NUMERIC['#0'],0,0,1) + try: + self.column_sort_last_params = self.cfg_get(CFG_SORTING) + except: + #colname,sort_index,is_numeric,reverse,dir_code,non_dir_code + self.column_sort_last_params=('#0',self_REAL_SORT_COLUMN_INDEX['#0'],self_REAL_SORT_COLUMN_IS_NUMERIC['#0'],0,0,1) ####################################################################### @@ -1821,28 +1825,36 @@ def ver_number(var): r_dont2.grid(row=0, column=0, sticky='news',padx=4,pady=4) r_dont2.bind('', lambda event : self.find_items()) - r_without = Radiobutton(find_cd_frame,text="files without Custom Data ",variable=self.find_cd_search_kind_var,value='without',command=self.find_mod) + r_without = Radiobutton(find_cd_frame,text="No Custom Data",variable=self.find_cd_search_kind_var,value='without',command=self.find_mod) r_without.grid(row=1, column=0, sticky='news',padx=4,pady=4) r_without.bind('', lambda event : self.find_items()) - r_correct = Radiobutton(find_cd_frame,text="files with any correct Custom Data ",variable=self.find_cd_search_kind_var,value='any',command=self.find_mod) + r_correct = Radiobutton(find_cd_frame,text="Any correct Custom Data",variable=self.find_cd_search_kind_var,value='any',command=self.find_mod) r_correct.grid(row=2, column=0, sticky='news',padx=4,pady=4) r_correct.bind('', lambda event : self.find_items()) - r_error = Radiobutton(find_cd_frame,text="files with error on CD extraction",variable=self.find_cd_search_kind_var,value='error',command=self.find_mod) + r_error = Radiobutton(find_cd_frame,text="Error on CD extraction",variable=self.find_cd_search_kind_var,value='error',command=self.find_mod) r_error.grid(row=3, column=0, sticky='news',padx=4,pady=4) r_error.bind('', lambda event : self.find_items()) + r_error_empty = Radiobutton(find_cd_frame,text="No CD extracted (empty value)",variable=self.find_cd_search_kind_var,value='empty',command=self.find_mod) + r_error_empty.grid(row=4, column=0, sticky='news',padx=4,pady=4) + r_error_empty.bind('', lambda event : self.find_items()) + + r_error_empty = Radiobutton(find_cd_frame,text="CD extraction aborted",variable=self.find_cd_search_kind_var,value='aborted',command=self.find_mod) + r_error_empty.grid(row=5, column=0, sticky='news',padx=4,pady=4) + r_error_empty.bind('', lambda event : self.find_items()) + regexp_radio_cd = Radiobutton(find_cd_frame,text="by regular expression",variable=self.find_cd_search_kind_var,value='regexp',command=self.find_mod) - regexp_radio_cd.grid(row=4, column=0, sticky='news',padx=4,pady=4) + regexp_radio_cd.grid(row=6, column=0, sticky='news',padx=4,pady=4) regexp_radio_cd.bind('', lambda event : self.find_items()) glob_radio_cd = Radiobutton(find_cd_frame,text="by glob pattern",variable=self.find_cd_search_kind_var,value='glob',command=self.find_mod) - glob_radio_cd.grid(row=5, column=0, sticky='news',padx=4,pady=4) + glob_radio_cd.grid(row=7, column=0, sticky='news',padx=4,pady=4) glob_radio_cd.bind('', lambda event : self.find_items()) fuzzy_radio_cd = Radiobutton(find_cd_frame,text="by fuzzy match",variable=self.find_cd_search_kind_var,value='fuzzy',command=self.find_mod) - fuzzy_radio_cd.grid(row=6, column=0, 
sticky='news',padx=4,pady=4) + fuzzy_radio_cd.grid(row=8, column=0, sticky='news',padx=4,pady=4) fuzzy_radio_cd.bind('', lambda event : self.find_items()) self.find_cd_regexp_entry = Entry(find_cd_frame,textvariable=self.find_cd_regexp_var,validate="key") @@ -1853,17 +1865,17 @@ def ver_number(var): self.find_cd_glob_entry.bind("", self.find_mod_keypress) self.find_cd_fuzz_entry.bind("", self.find_mod_keypress) - self.find_cd_regexp_entry.grid(row=4, column=1, sticky='we',padx=4,pady=4) - self.find_cd_glob_entry.grid(row=5, column=1, sticky='we',padx=4,pady=4) - self.find_cd_fuzz_entry.grid(row=6, column=1, sticky='we',padx=4,pady=4) + self.find_cd_regexp_entry.grid(row=6, column=1, sticky='we',padx=4,pady=4) + self.find_cd_glob_entry.grid(row=7, column=1, sticky='we',padx=4,pady=4) + self.find_cd_fuzz_entry.grid(row=8, column=1, sticky='we',padx=4,pady=4) self.cd_case_sens_cb = Checkbutton(find_cd_frame,text='Case sensitive',variable=self.find_cd_case_sens_var,command=self.find_mod) - self.cd_case_sens_cb.grid(row=5, column=2, sticky='wens',padx=4,pady=4,columnspan=2) + self.cd_case_sens_cb.grid(row=7, column=2, sticky='wens',padx=4,pady=4,columnspan=2) self.find_cd_fuzzy_threshold_lab = Label(find_cd_frame,text='Threshold:',bg=self.bg_color,anchor='e') self.find_cd_fuzzy_threshold_entry = Entry(find_cd_frame,textvariable=self.find_cd_fuzzy_threshold) - self.find_cd_fuzzy_threshold_lab.grid(row=6, column=2, sticky='wens',padx=4,pady=4) - self.find_cd_fuzzy_threshold_entry.grid(row=6, column=3, sticky='wens',padx=4,pady=4) + self.find_cd_fuzzy_threshold_lab.grid(row=8, column=2, sticky='wens',padx=4,pady=4) + self.find_cd_fuzzy_threshold_entry.grid(row=8, column=3, sticky='wens',padx=4,pady=4) self.find_cd_fuzzy_threshold_entry.bind("", self.find_mod_keypress) @@ -2870,6 +2882,10 @@ def find_items(self): search_info_lines_append(f'Files with any correct Custom Data') elif find_cd_search_kind == 'error': search_info_lines_append('Files with error on CD extraction') + elif find_cd_search_kind == 'empty': + search_info_lines_append('Files with empty CD value') + elif find_cd_search_kind == 'aborted': + search_info_lines_append('Files with aborted CD extraction') elif find_cd_search_kind == 'regexp': if find_cd_regexp: if res := test_regexp(find_cd_regexp): @@ -3597,6 +3613,7 @@ def column_sort_click(self, tree, colname): sort_index=self.REAL_SORT_COLUMN_INDEX[colname] is_numeric=self.REAL_SORT_COLUMN_IS_NUMERIC[colname] self.column_sort_last_params=(colname,sort_index,is_numeric,reverse,dir_code,non_dir_code) + self.cfg.set(CFG_SORTING,self.column_sort_last_params) #print('\npre sort info colname:',colname,'is_numeric',is_numeric,'reverse:',reverse) colname_real = self.REAL_SORT_COLUMN[colname] diff --git a/src/record.py b/src/record.py index 4616af8..0307c30 100644 --- a/src/record.py +++ b/src/record.py @@ -269,12 +269,13 @@ def proper_exit(code): cd_glob=bool(find_cd_search_kind=='glob') cd_fuzzy=bool(find_cd_search_kind=='fuzzy') cd_error=bool(find_cd_search_kind=='error') + cd_empty=bool(find_cd_search_kind=='empty') + cd_aborted=bool(find_cd_search_kind=='aborted') cd_without=bool(find_cd_search_kind=='without') cd_ok=bool(find_cd_search_kind=='any') if cd_regexp: custom_data_needed=True - cd_search_kind='regexp' if res := test_regexp(cd_expr): proper_exit(res) re_obj_cd=re_compile(cd_expr, MULTILINE | DOTALL) @@ -282,7 +283,6 @@ def proper_exit(code): elif cd_glob: custom_data_needed=True - cd_search_kind='glob' if cd_case_sens: re_obj_cd=re_compile(translate(cd_expr), MULTILINE | 
DOTALL) cd_func_to_call = lambda x : re_obj_cd.match(x) @@ -291,19 +291,17 @@ def proper_exit(code): cd_func_to_call = lambda x : re_obj_cd.match(x) elif cd_fuzzy: custom_data_needed=True - cd_search_kind='fuzzy' cd_func_to_call = lambda x : bool(SequenceMatcher(None,cd_expr, x).ratio()>cd_fuzzy_threshold) elif cd_without: - cd_search_kind='without' + cd_func_to_call = None + elif cd_empty: cd_func_to_call = None elif cd_error: - cd_search_kind='error' cd_func_to_call = None elif cd_ok: - cd_search_kind='any' cd_func_to_call = None else: - cd_search_kind='dont' + #cd_search_kind='dont' cd_func_to_call = None ##################################################################### @@ -323,7 +321,7 @@ def proper_exit(code): size_min,size_max, t_min,t_max, name_search_kind,name_func_to_call, - cd_search_kind,cd_func_to_call, + find_cd_search_kind,cd_func_to_call, print_info) except Exception as fe: print_info(f'find_items error:{fe}') From b818bf0c913e7706e6f41291b0a5982a24e74265 Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 2 Mar 2024 11:46:23 +0100 Subject: [PATCH 17/20] icon --- src/icons/cd_aborted.png | Bin 83 -> 105 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/src/icons/cd_aborted.png b/src/icons/cd_aborted.png index 8a0332d2a7e921103ed6392c14b73d36ef72fc0c..b63bc47627b1b24deef2fa23ef6114ac028dab36 100644 GIT binary patch delta 74 [base85 binary delta data omitted] From cb3a62652a8913b26b8278972a583c034ad32285 Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 2 Mar 2024 14:48:21 +0100 Subject: [PATCH 18/20] remember sorting order, calculate groups sum size --- src/librer.py | 48 ++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 42 insertions(+), 6 deletions(-) diff --git a/src/librer.py b/src/librer.py index 0feb63a..1593120 100644 --- a/src/librer.py +++ b/src/librer.py @@ -577,7 +577,8 @@ def __init__(self,cwd): tree_column('ctime_h', width=150, minwidth=100, stretch='no',anchor='e') tree_heading = tree.heading - tree_heading('#0',text='Name \u25B2',anchor='w') + #tree_heading('#0',text='Name \u25B2',anchor='w') + tree_heading('#0',text='Name',anchor='w') tree_heading('size_h',anchor='w',text=self_org_label['size_h']) tree_heading('ctime_h',anchor='n',text=self_org_label['ctime_h']) @@ -924,10 +925,10 @@ def tree_focus_in(self): tree.selection_remove(*selection) item=selection[0] tree.focus(item) - tree_sel_change(item,True) + self.tree_sel_change(item,True) elif item:=self.selected: tree.focus(item) - tree_sel_change(item,True) + self.tree_sel_change(item,True) except Exception as e: l_error(f'groups_tree_focus_in:{e}') @@ -3543,6 +3544,8 @@ def remove_group(self): def remove_from_group(self): record = self.current_record + group = librer_core.get_record_group(record) + res = librer_core.remove_record_from_group(record) if res : self.info_dialog_on_main.show('Error',res) @@ -3550,6 +3553,11 @@ def remove_from_group(self): record_item = self.record_to_item[record] self.tree.move(record_item,'',0) + if group: + size=record.header.sum_size + self.group_to_size_sum[group]-=size + self.single_group_update_size(group) + self.find_clear() self.column_sort(self.tree) @@ -3571,6 +3579,7 @@ def assign_to_group(self): dial.combobox.configure(values=values) record = self.current_record current = librer_core.get_record_group(record) + size=record.header.sum_size if not current: if self.last_assign_to_group_group in values: @@ -3584,15 
+3593,23 @@ def assign_to_group(self): group = dial.entry_val.get() if group: + self.last_assign_to_group_group = group res2=librer_core.assign_new_group(record,group) if res2: self.info_dialog_on_main.show('assign_new_group Error',res2) else: + if current: + self.group_to_size_sum[current]-=size + self.single_group_update_size(current) + group_item = self.group_to_item[group] record_item = self.record_to_item[record] self.tree.move(record_item,group_item,0) - #self.tree.open(group_item) + + self.group_to_size_sum[group]+=size + self.single_group_update_size(group) + self.open_item(group_item) self.tree.focus(record_item) self.tree.see(record_item) @@ -4075,6 +4092,8 @@ def remove_record(self): label = librer_core.get_record_name(record) path = record.header.scan_path creation_time = record.header.creation_time + group = librer_core.get_record_group(record) + size=record.header.sum_size dialog = self.get_simple_question_dialog() @@ -4090,6 +4109,10 @@ def remove_record(self): res=librer_core.delete_record(record) l_info(f'deleted file:{res}') + if group: + self.group_to_size_sum[group]-=size + self.single_group_update_size(group) + self.find_clear() #record.find_results_clean() @@ -4521,16 +4544,26 @@ def get_record_raw_icon(self,record): def groups_show(self): #('data','record','opened','path','size','size_h','ctime','ctime_h','kind') self.group_to_item = {} + self.group_to_size_sum = {} + + self.group_to_size_sum[None]=0 self.group_to_item[None]='' for group in librer_core.groups: self.single_group_show(group) + def single_group_update_size(self, group): + values = (group,group,0,'',self.group_to_size_sum[group],bytes_to_str(self.group_to_size_sum[group]),0,'',self.GROUP) + + self.tree.item(self.group_to_item[group],values=values) + def single_group_show(self,group): - values = (group,group,0,'',0,'',0,'',self.GROUP) + self.group_to_size_sum[group]=0 + group_item=self.tree.insert('','end',iid=None,open=False,text=group,image=self.ico_group,tags=self.GROUP) - group_item=self.tree.insert('','end',iid=None,values=values,open=False,text=group,image=self.ico_group,tags=self.GROUP) self.group_to_item[group] = group_item + self.single_group_update_size(group) + self.tree.focus(group_item) self.tree.see(group_item) self.column_sort(self.tree) @@ -4553,6 +4586,9 @@ def single_record_show(self,record): #print(self.cfg.get(CFG_KEY_groups_collapse),group_item) self.tree.item(group_item, open = False) + + self.group_to_size_sum[group]+=size + self.single_group_update_size(group) #self.cfg.get(CFG_KEY_groups_collapse) self.tree_sort_item(None) From dc7df2f1e8a88483e5f16a2d0963ee1f11ad58aa Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 2 Mar 2024 19:52:25 +0100 Subject: [PATCH 19/20] nuitka update --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a18bf75..83bfe1a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,4 +5,4 @@ psutil==5.9.8 pympler==1.0.1 ciso8601==2.3.1 pywin32==306; sys.platform == 'win32' -nuitka==2.0.5 +nuitka==2.0.6 From 0c675d5e69582c8f37dd539d347bc9d963361aec Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 2 Mar 2024 19:52:52 +0100 Subject: [PATCH 20/20] minor fixes --- src/librer.py | 59 +++++++++++++++++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 21 deletions(-) diff --git a/src/librer.py b/src/librer.py index 1593120..579606e 100644 --- a/src/librer.py +++ b/src/librer.py @@ -832,10 +832,12 @@ def help_cascade_post(): self.menubar_config(cursor='') 
self.main_config(cursor='') - self.tree_semi_focus() - #for child in self.tree.get_children(): - # self.tree.item(child, open=False) + if items := self.tree.get_children(): + self.tree.focus(items[0]) + + self.tree.focus_set() + self.tree_semi_focus() self.status_info.configure(image='',text = 'Ready') @@ -2386,13 +2388,32 @@ def show_tooltips_tree(self,event): if has_cd: tooltip_list.append('') - tooltip_list.append('(Double click to show Custom Data.)') + if not cd_ok: + tooltip_list.append('Custom Data Extraction ended with error') + tooltip_list.append('(Double click to show Custom Data.)') + elif cd_aborted: + tooltip_list.append('Custom Data Extraction was aborted') + tooltip_list.append('(Double click to show Custom Data.)') + elif cd_empty: + tooltip_list.append('Custom Data is empty') + else: + tooltip_list.append('(Double click to show Custom Data.)') + self.tooltip_lab_configure(text='\n'.join(tooltip_list)) self.tooltip_deiconify_wrapp() + + elif tree.tag_has(self.GROUP,item): + if values := tree.item(item,'values'): + name = values[0] + self.tooltip_lab_configure(text=f'group :{name}') + else: + self.tooltip_lab_configure(text='') + + else: - self.tooltip_lab_configure(text='label') + self.tooltip_lab_configure(text='unknown_label') elif col: coldata=tree.set(item,col) @@ -3426,10 +3447,10 @@ def context_menu_show(self,event): item_actions_state=('disabled','normal')[self.sel_item is not None] - item=self.tree.focus() is_group = bool(self.tree.tag_has(self.GROUP,item)) + is_record_loaded = bool(self.tree.tag_has(self.RECORD,item)) is_record = bool(self.tree.tag_has(self.RECORD_RAW,item) or self.tree.tag_has(self.RECORD,item)) record_in_group = False @@ -3492,6 +3513,8 @@ def context_menu_show(self,event): pop_add_separator() pop_add_command(label = 'Clear Search Results',command = self.find_clear, image = self.ico_empty,compound='left',state = 'normal' if self.any_valid_find_results else 'disabled') pop_add_separator() + pop_add_command(label = 'Unload record data',command = self.unload_record, accelerator="Backspace", image = self.ico_empty,compound='left',state = 'normal' if is_record_loaded else 'disabled') + pop_add_separator() pop_add_command(label = 'Exit', command = self.exit ,image = self.ico['exit'],compound='left') @@ -3565,20 +3588,15 @@ def remove_from_group(self): last_assign_to_group_group = None @logwrapper def assign_to_group(self): - #item=self.tree.focus() - - #is_group = bool(self.tree.tag_has(self.GROUP,item)) - #is_record = bool(self.tree.tag_has(self.RECORD_RAW,item) or self.tree.tag_has(self.RECORD,item)) - if self.current_record: - curr_group = librer_core.get_record_group(self.current_record) + record = self.current_record + current = prev_group = librer_core.get_record_group(record) + #print(f'{current=}') dial = self.get_assign_to_group_dialog() - values = list(librer_core.groups.keys()) dial.combobox.configure(values=values) - record = self.current_record - current = librer_core.get_record_group(record) + size=record.header.sum_size if not current: @@ -3599,15 +3617,15 @@ def assign_to_group(self): if res2: self.info_dialog_on_main.show('assign_new_group Error',res2) else: - if current: - self.group_to_size_sum[current]-=size + if prev_group: + self.group_to_size_sum[current] -= size self.single_group_update_size(current) group_item = self.group_to_item[group] record_item = self.record_to_item[record] self.tree.move(record_item,group_item,0) - self.group_to_size_sum[group]+=size + self.group_to_size_sum[group] += size 
self.single_group_update_size(group) self.open_item(group_item) @@ -4553,9 +4571,8 @@ def groups_show(self): self.single_group_show(group) def single_group_update_size(self, group): - values = (group,group,0,'',self.group_to_size_sum[group],bytes_to_str(self.group_to_size_sum[group]),0,'',self.GROUP) - - self.tree.item(self.group_to_item[group],values=values) + sum_size = self.group_to_size_sum[group] + self.tree.item(self.group_to_item[group],values=(group,group,0,'',sum_size,bytes_to_str(sum_size),0,'',self.GROUP)) def single_group_show(self,group): self.group_to_size_sum[group]=0