From e71e22ea928a117530026c4fd1fc453ba2b2719e Mon Sep 17 00:00:00 2001 From: piotrj Date: Sat, 9 Dec 2023 12:58:37 +0100 Subject: [PATCH] detailed info on internal data, dedicated icons indicating internal state, info about records loading errors --- src/core.py | 115 ++++++++++++++++++++------------- src/icons/record.png | Bin 567 -> 577 bytes src/icons/record_cd.png | Bin 0 -> 607 bytes src/icons/record_cd_loaded.png | Bin 0 -> 601 bytes src/icons/record_raw.png | Bin 0 -> 527 bytes src/icons/record_raw_cd.png | Bin 0 -> 559 bytes src/librer.py | 61 ++++++++++------- 7 files changed, 107 insertions(+), 69 deletions(-) create mode 100644 src/icons/record_cd.png create mode 100644 src/icons/record_cd_loaded.png create mode 100644 src/icons/record_raw.png create mode 100644 src/icons/record_raw_cd.png diff --git a/src/core.py b/src/core.py index 5c4ccca..f464059 100644 --- a/src/core.py +++ b/src/core.py @@ -26,7 +26,7 @@ # #################################################################################### -from pympler import asizeof +from pympler.asizeof import asizeof from json import loads as json_loads @@ -84,7 +84,7 @@ PARAM_INDICATOR_SIGN = '%' -data_format_version='1.0011' +data_format_version='1.0012' VERSION_FILE='version.txt' @@ -233,6 +233,12 @@ def __init__(self,label='',scan_path=''): self.files_cde_size_extracted = 0 self.files_cde_errors_quant = 0 + self.items_names=0 + self.items_cd=0 + + self.references_names = 0 + self.references_cd = 0 + self.cde_list = [] self.zipinfo = {} @@ -628,9 +634,9 @@ def tupelize_rec(self,scan_like_data): self_customdata = self.customdata sub_list = [] for entry_name,items_list in scan_like_data.items(): - try: entry_name_index = self.filenames_helper[entry_name] + self.header.references_names+=1 except Exception as VE: print('filenames error:',entry_name,VE) else: @@ -671,6 +677,7 @@ def tupelize_rec(self,scan_like_data): sub_list_elem.append(self_tupelize_rec(sub_dict)) else: if has_cd: #only files + self.header.references_cd+=1 sub_list_elem.append( cd_index ) if has_crc: #only files sub_list_elem.append( crc_val ) @@ -680,7 +687,8 @@ def tupelize_rec(self,scan_like_data): except Exception as e: self.log.error('tupelize_rec error::%s',e ) print('tupelize_rec error:',e,' entry_name:',entry_name,' items_list:',items_list) - + + return tuple(sorted(sub_list,key = lambda x : x[1:4])) ############################################################# @@ -694,10 +702,16 @@ def pack_data(self): has_files = True cd_ok = False has_crc = False - + + self.header.references_names=0 + self.header.references_cd=0 + code = LUT_encode[ (is_dir,is_file,is_symlink,is_bind,has_cd,has_files,cd_ok,has_crc) ] self.filestructure = ('',code,size,mtime,self.tupelize_rec(self.scan_data)) - + + self.header.items_names=len(self.filenames) + self.header.items_cd=len(self.customdata) + del self.filenames_helper del self.scan_data @@ -929,7 +943,7 @@ def prepare_info(self): else: self_header = self.header - local_time = strftime('%Y/%m/%d %H:%M:%S',localtime(self.header.creation_time)) + local_time = strftime('%Y/%m/%d %H:%M:%S',localtime(self_header.creation_time)) info_list.append(f'record label : {self_header.label}') info_list.append('') info_list.append(f'scanned path : {self_header.scan_path}') @@ -945,21 +959,25 @@ def prepare_info(self): info_list.append('') info_list.append(f'record file : {self.FILE_NAME} ({bytes_to_str(self.FILE_SIZE)})') info_list.append('') - info_list.append( 'internal sizes : compressed serialized original #unique #entry') + info_list.append( 
'internal sizes : compressed serialized original items references') info_list.append('') h_data = self.header_sizes - fs_data = self.header.zipinfo["filestructure"] - fn_data = self.header.zipinfo["filenames"] - cd_data = self.header.zipinfo["customdata"] - + fs_data = self_header.zipinfo["filestructure"] + fn_data = self_header.zipinfo["filenames"] + cd_data = self_header.zipinfo["customdata"] + info_list.append(f'header :{bytes_to_str_mod(h_data[0]).rjust(12) }{bytes_to_str_mod(h_data[1]).rjust(12) }{bytes_to_str_mod(h_data[2]).rjust(12) }') info_list.append(f'filestructure :{bytes_to_str_mod(fs_data[0]).rjust(12) }{bytes_to_str_mod(fs_data[1]).rjust(12) }{bytes_to_str_mod(fs_data[2]).rjust(12) }') - info_list.append(f'file names :{bytes_to_str_mod(fn_data[0]).rjust(12) }{bytes_to_str_mod(fn_data[1]).rjust(12) }{bytes_to_str_mod(fn_data[2]).rjust(12) }{str(len(self.filenames)).rjust(12)}') - + info_list.append(f'file names :{bytes_to_str_mod(fn_data[0]).rjust(12) }{bytes_to_str_mod(fn_data[1]).rjust(12) }{bytes_to_str_mod(fn_data[2]).rjust(12) }{str(fnumber(self_header.items_names)).rjust(12) }{str(fnumber(self_header.references_names)).rjust(12)}') + if cd_data[0]: - info_list.append(f'custom data :{bytes_to_str_mod(cd_data[0]).rjust(12) }{bytes_to_str_mod(cd_data[1]).rjust(12) }{bytes_to_str_mod(cd_data[2]).rjust(12) }{str(len(self.customdata)).rjust(12)}') - + info_list.append(f'custom data :{bytes_to_str_mod(cd_data[0]).rjust(12) }{bytes_to_str_mod(cd_data[1]).rjust(12) }{bytes_to_str_mod(cd_data[2]).rjust(12) }{str(fnumber(self_header.items_cd)).rjust(12) }{str(fnumber(self_header.references_cd)).rjust(12)}') + + info_list.append('') + info_list.append('filesystem - ' + ('loaded' if self.decompressed_filestructure else 'not loaded yet') ) + info_list.append('custom data - ' + ('not present' if not bool(cd_data[0]) else 'loaded' if self.decompressed_customdata else 'not loaded yet') ) + try: if self_header.cde_list: info_list.append('\nCDE rules (draft):') @@ -969,7 +987,10 @@ def prepare_info(self): pass self.txtinfo = '\n'.join(info_list) - + + def has_cd(self): + return bool(self.header.zipinfo["customdata"][0]) + def save(self,file_path=None,compression_level=16): if file_path: filename = basename(normpath(file_path)) @@ -980,39 +1001,40 @@ def save(self,file_path=None,compression_level=16): self.info_line = f'saving {filename}' self.log.info('saving %s' % file_path) - + + self_header = self.header + with ZipFile(file_path, "w") as zip_file: compressor = ZstdCompressor(level=compression_level,threads=-1) - - #self.header.zipinfo = {} + compressor_compress = compressor.compress self.info_line = f'serializing File stucture' filestructure_ser = dumps(self.filestructure) self.info_line = f'compressing File stucture' - filestructure_ser_compr = compressor.compress(filestructure_ser) - self.header.zipinfo['filestructure'] = (asizeof.asizeof(filestructure_ser_compr),asizeof.asizeof(filestructure_ser),asizeof.asizeof(self.filestructure)) - + filestructure_ser_compr = compressor_compress(filestructure_ser) + self_header.zipinfo['filestructure'] = (asizeof(filestructure_ser_compr),asizeof(filestructure_ser),asizeof(self.filestructure)) + self.info_line = f'serializing file names' filenames_ser = dumps(self.filenames) self.info_line = f'compressing file names' - filenames_ser_comp = compressor.compress(filenames_ser) - self.header.zipinfo['filenames'] = (asizeof.asizeof(filenames_ser_comp),asizeof.asizeof(filenames_ser),asizeof.asizeof(self.filenames)) + filenames_ser_comp = 
compressor_compress(filenames_ser) + self_header.zipinfo['filenames'] = (asizeof(filenames_ser_comp),asizeof(filenames_ser),asizeof(self.filenames)) if self.customdata: self.info_line = f'serializing custom data' customdata_ser = dumps(self.customdata) self.info_line = f'compressing custom data' - customdata_ser_compr = compressor.compress(customdata_ser) - self.header.zipinfo['customdata'] = (asizeof.asizeof(customdata_ser_compr),asizeof.asizeof(customdata_ser),asizeof.asizeof(self.customdata)) + customdata_ser_compr = compressor_compress(customdata_ser) + self_header.zipinfo['customdata'] = (asizeof(customdata_ser_compr),asizeof(customdata_ser),asizeof(self.customdata)) else: - self.header.zipinfo['customdata'] = (0,0,0) + self_header.zipinfo['customdata'] = (0,0,0) ########### - header_ser = dumps(self.header) - header_ser_compr = compressor.compress(header_ser) - self.header_sizes=(asizeof.asizeof(header_ser_compr),asizeof.asizeof(header_ser),asizeof.asizeof(self.header)) + header_ser = dumps(self_header) + header_ser_compr = compressor_compress(header_ser) + self.header_sizes=(asizeof(header_ser_compr),asizeof(header_ser),asizeof(self_header)) - zip_file.writestr('header',compressor.compress(header_ser)) + zip_file.writestr('header',header_ser_compr) ########### self.info_line = f'saving {filename} (File stucture)' zip_file.writestr('filestructure',filestructure_ser_compr) @@ -1037,6 +1059,7 @@ def load_wrap(self,db_dir,file_name): def load(self,file_path): self.file_path = file_path file_name = basename(normpath(file_path)) + #self.log.info('loading %s' % file_name) #TODO - problem w podprocesie @@ -1045,18 +1068,19 @@ def load(self,file_path): header_ser_compr = zip_file.read('header') header_ser = ZstdDecompressor().decompress(header_ser_compr) self.header = loads( header_ser ) - - self.header_sizes=(asizeof.asizeof(header_ser_compr),asizeof.asizeof(header_ser),asizeof.asizeof(self.header)) - - self.prepare_info() + self.header_sizes=(asizeof(header_ser_compr),asizeof(header_ser),asizeof(self.header)) if self.header.data_format_version != data_format_version: - self.log.error(f'incompatible data format version error: {self.header.data_format_version} vs {data_format_version}') - return True + message = f'loading "{file_path}" error: incompatible data format version: {self.header.data_format_version} vs {data_format_version}' + self.log.error(message) + return message + + self.prepare_info() except Exception as e: - print('loading error:file:%s error:%s' % (file_path,e) ) - return True + message = f'loading "{file_path}" error: "{e}"' + #self.log.error(message) + return message return False @@ -1073,7 +1097,6 @@ def decompress_filestructure(self): self.filenames = loads(filenames_ser) self.decompressed_filestructure = True - self.prepare_info() return True @@ -1092,8 +1115,8 @@ def decompress_customdata(self): self.customdata = [] self.decompressed_customdata = True - self.prepare_info() + return True return False @@ -1158,7 +1181,7 @@ def abort(self): #print('core abort') self.abort_action = True - def read_records(self): + def threaded_read_records(self,load_errors): self.log.info('read_records: %s',self.db_dir) self.records_to_show=[] @@ -1176,10 +1199,12 @@ def read_records(self): info_curr_quant+=1 info_curr_size+=size - - if new_record.load_wrap(self.db_dir,filename) : + + + if res:=new_record.load_wrap(self.db_dir,filename) : self.log.warning('removing:%s',filename) self.records.remove(new_record) + load_errors.append(res) else: self.records_to_show.append( 
(new_record,info_curr_quant,info_curr_size) ) self.update_sorted() diff --git a/src/icons/record.png b/src/icons/record.png index 117c5d6b8537df457c8be0e069346f1b4bf9d654..9426fe49bf488d9d2d4b37bf2394cc5eddf7a8bc 100644 GIT binary patch literal 577 zcmV-H0>1r;P)5a2)3pz&0Jx;o%{jPKO`}HjQSp830Nt_V)G=LQpD| zI6gk6+wEcu2*Z#t4AELsE|-x~;y4bj>*BgDg+c+(^N>=qSS*O+m@o{@0n_P}d_GSQ z1V|~#X0s@zP)d=e>1GX(rYXS7$BPE38L=U%<{ySqpC2#UJAy!-@w1l|=b4ZJP%A0R07Z{T~k+x-S0 z04^>ret4eO-re2BaU1|PH#bpA0gxmKX_^8so6Q)H$4sZwpU20?9TVW`>8X8pcgNP& z7E($8EX#Vn96|`PEGq%S;SkHR+5qPBP_x;@_kDsOC}qaSr)c!W7{^a>*6>LQc9M~C25)xMbR1%h9NsUI|M<1l#)uN zf>H{l6nUPPYk)k@d3<~%48t|RG)?;bK1wMzHa5tz?1cx*<&uYo2c(qT-{0f=KBj4| z0S5;M93CEWeSOVjGC>GI6h$vQ&{_k~Xf)__I^5pgzVslDV?57ee}Df!co4_&t38P0 zcn!$&oO->^U@#y}Qw+l(%d+zGR4Nr**TpalYPA}zR*UoV^EDt%Q?|Fa+1uOu=LwGE z;CUWeYi!$QKA)F902n}d(rfB@j|s3^txj)lZoX)(5kdfznE+TU7K}zCq?8PYL$0o_ zSgls4Wf}oaPEI}mpMm#9>kUvVbWlVZpa*o)y~_Qr+tEy(Q38c0Iz`;Nl63G6Z`}468s(b zytTFU2|%Q2-fT9%xUO4USy{nx8~`$z3`!{g#^W(z7y@v8ea*$i1y@&BU$?in>t-5) zySuyE`T04Ei;GAp0kAAzNloDUKECgxwWd@mA*IB!ENt7xwr%qHJdWcarDQUh5QZVX@24ji zjYcdjE#Y|{QcAMfEJ`VqQp9mQ&jI2%W;UBK8jaEun5N0;=_yJn78VwWqUeDJlgWh3 z%S)t`^!t4}oerjHrY9(u%Ty{Aj*pKS4u=RK@O}S*23l(XN~IF@dYzM#lc#AA1Ocw= zvbMJNA8HT;!IL!zg5Yt2-QC>}z-E#j?(OZpkK>qPvB=rk8DSV=7zR-k%|BN*nes*2ARxX!u90!0*CWBH6fa!Ef7={2`Ute>1dCAq))t9}!y}Fr(;Qs!; zc7A@&^71lLN&qa&TAT|Zh@xmgFc=K5EUN}!rd`p-#s=MP7tiw+ji%En07@xxxg0_W zip3&ZTU+#cJyH|+zK`$wXss!gN=PZOEDPJVv2B}tK9A!#NGX}kW`tpg@B8TqCX)%P ztE+gPhm?|RHj7dUr4(@-FLHo5j=8(LV=|efCooNuv$HdlQY4Q8_$h(IOr>D=;AP53n*QHXa z{D&F@LGWx1f*^Q;;0Fi5PLdrVXvA?$u~?+v?-Pb0hG7sz(c*Juvsv=_JceOVC=}S< z-lo-RJvqS+@aDI?5r!cvD=Vz6t^Ktk$8m677p*n6ZF6&Tv(N#+NVB4cmp&&R?)+fW7Z*q=84Lz=IvwWoc_X!(0_^YazXsj|n@Q;f nP)M+sST;Zp_|R-NzXJROlEeF%mAvwE00000NkvXXu0mjfBZmnd literal 0 HcmV?d00001 diff --git a/src/icons/record_raw.png b/src/icons/record_raw.png new file mode 100644 index 0000000000000000000000000000000000000000..108528a9687779d34e51e9b375d8f1828c469536 GIT binary patch literal 527 zcmV+q0`UEbP)^Jj$nsI2k7K_Cv;3F`sU9W(*wfzTpx;+KH4~N5V08X3F z=kxgwrPR37=^&*9!1FwW5C9lsu+{>gwdQa*a6BG=jz*&oZX1BAs>WHCp|!?Zi?tTV zdCtvooW_5-T*$JFs;b5Sep`yV-7aaGayp%wOpP%B1VKO;h5*EIOt05ty#!|%d)w@Ua#bNj^}yo_j}Sb#dY2G07;VY@bJKDwIa{+7a9m5 zh@z;`U@#c4*=+99Kx>UsiX=(?Lk+ancWZE01!Y;Hlw!BrHBEMFteXa57~(h%N-6sN zKC{`ZJ%F_qDJ4mgyr4)*iBgIvitv3OV@#t1fZG<}-gS#(xpva_>0`=6OVGY&OnI-Pz1-UHpz^a6NY>OXEx7+;; z;MDniGMRi;O7)t}CQ?cOJkLW20e~?EYb^j;YgVfjo6Y9i+1c4Uw+=v26umS}(OP4z z#afHwJjCWWPGvtF4y0*HQ4~D@zix?Itrkg=u-$H}NR2T71VKO;h5$4g4Ngx_xxKxm zHh|We{eDjrMKl@>eBZB@5P~oaky7G$9$A)Qtwn2HA7HoJA*E!y-Bz^^f&d`|MNw3H zSx)jiXSdtc2XI}N#bSXF0@ro1){^IWb)RJ!>-8GX^SHmiCrJ`q*R2l_$1x`-C(LFu z*6Z~n4ulXyQB-lz>2#RS=g;FnYmHKhIFA2A4z$+K=HST;Mx)Usa9$p-`~Chf&vTSg zESJlw$&QV6%36h-*Hk1?j= z0if2XI6i)k9DV+k!S&y|wRV^!$z>Ep1VQlI(SL9?#$b%W^E}ct<>uxFYwfW1GzGZ2 xx_S$I1m2XUSHR0szbi*Jzz^WlU@-U&@C)}28CMy}^yL5m002ovPDHLkV1n~Q{A>UK literal 0 HcmV?d00001 diff --git a/src/librer.py b/src/librer.py index adc68a7..7dbba55 100644 --- a/src/librer.py +++ b/src/librer.py @@ -249,7 +249,11 @@ def __init__(self,cwd): #self_hg_ico = self.hg_ico self.hg_ico_len = len(self.hg_ico) + self.ico_record_raw = self_ico['record_raw'] + self.ico_record_raw_cd = self_ico['record_raw_cd'] self.ico_record = self_ico['record'] + self.ico_record_cd = self_ico['record_cd'] + self.ico_record_cd_loaded = self_ico['record_cd_loaded'] self.ico_cd_ok = self_ico['cd_ok'] self.ico_cd_ok_crc = self_ico['cd_ok_crc'] self.ico_cd_error = self_ico['cd_error'] @@ -271,6 +275,7 @@ def __init__(self,cwd): self_main.iconphoto(True, 
self_ico_librer,self.ico_record)
 
+        self.RECORD_RAW='r'
         self.RECORD='R'
         self.DIR='D'
         self.DIRLINK='L'
@@ -451,6 +456,7 @@ def __init__(self,cwd):
 
         tree_tag_configure = tree.tag_configure
 
+        tree_tag_configure(self.RECORD_RAW, foreground='gray')
         tree_tag_configure(self.RECORD, foreground='green')
         tree_tag_configure(self.SYMLINK, foreground='gray')
         tree_tag_configure(self.FOUND, foreground='red')
@@ -608,11 +614,12 @@ def help_cascade_post():
             self.status_info.configure(image='',text = 'Checking records to load ...')
             records_quant,records_size = librer_core.read_records_pre()
-
+
+            load_errors = []
             if records_quant:
                 self.status_info.configure(image='',text = 'Loading records ...')
-                read_thread=Thread(target=lambda : librer_core.read_records(),daemon=True)
+                read_thread=Thread(target=lambda : librer_core.threaded_read_records(load_errors),daemon=True)
                 read_thread_is_alive = read_thread.is_alive
 
                 self_progress_dialog_on_load.lab_l1.configure(text='Records space:')
@@ -674,7 +681,10 @@ def help_cascade_post():
 
                 if self.action_abort:
                     self.info_dialog_on_main.show('Records loading aborted','Restart Librer to gain full access to the recordset.')
-
+
+            if load_errors:
+                self.get_text_info_dialog().show('Loading errors','\n\n'.join(load_errors) )
+
             self.menu_enable()
             self.menubar_config(cursor='')
             self.main_config(cursor='')
@@ -1580,11 +1590,11 @@ def validate_size_str(val):
             Entry(find_size_frame,textvariable=self.find_size_min_var).grid(row=0, column=1, sticky='we',padx=4,pady=4)
             Entry(find_size_frame,textvariable=self.find_size_max_var).grid(row=0, column=3, sticky='we',padx=4,pady=4)
 
-            (find_modtime_frame := LabelFrame(sfdma,text='File mod time',bd=2,bg=self.bg_color,takefocus=False)).grid(row=4,column=0,sticky='news',padx=4,pady=4)
+            (find_modtime_frame := LabelFrame(sfdma,text='File last modification time',bd=2,bg=self.bg_color,takefocus=False)).grid(row=4,column=0,sticky='news',padx=4,pady=4)
             find_modtime_frame.grid_columnconfigure((0,1,2,3), weight=1)
 
-            Label(find_modtime_frame,text='min: ',bg=self.bg_color,anchor='e',relief='flat',bd=2).grid(row=0, column=0, sticky='we',padx=4,pady=4)
-            Label(find_modtime_frame,text='max: ',bg=self.bg_color,anchor='e',relief='flat',bd=2).grid(row=0, column=2, sticky='we',padx=4,pady=4)
+            Label(find_modtime_frame,text='after: ',bg=self.bg_color,anchor='e',relief='flat',bd=2).grid(row=0, column=0, sticky='we',padx=4,pady=4)
+            Label(find_modtime_frame,text='before: ',bg=self.bg_color,anchor='e',relief='flat',bd=2).grid(row=0, column=2, sticky='we',padx=4,pady=4)
             Entry(find_modtime_frame,textvariable=self.find_modtime_min_var).grid(row=0, column=1, sticky='we',padx=4,pady=4)
             Entry(find_modtime_frame,textvariable=self.find_modtime_max_var).grid(row=0, column=3, sticky='we',padx=4,pady=4)
@@ -1726,17 +1736,21 @@ def record_import(self):
         self.status('importing record ...')
         new_record = librer_core.create()
 
-        if new_record.load(self.import_dialog_file):
-            self.log.error('import failed :%s',self.import_dialog_file)
+        if res:=new_record.load(self.import_dialog_file):
+            self.log.error(f'import failed :{self.import_dialog_file} error: {res}')
+            #TODO - show a dialog with the error information
             return
 
         local_file_name = 'imported.'+ str(time()) + '.dat'
         local_file = sep.join([DATA_DIR,local_file_name])
 
         new_record.clone_record(local_file,keep_cd,keep_crc,self.import_compr_var_int.get())
-        new_record.load_wrap(DATA_DIR,local_file_name)
 
-        self.single_record_show(new_record)
-        self.last_dir = dirname(self.import_dialog_file)
+        if res:=new_record.load_wrap(DATA_DIR,local_file_name):
+            print(f'record_import:{res}')
+            #TODO - secondary load
+        else:
+            self.single_record_show(new_record)
+            self.last_dir = dirname(self.import_dialog_file)
 
     def focusin(self):
         #print('focusin')
@@ -3517,6 +3531,10 @@ def access_customdata(self,record):
         self.status('loading custom data ...')
         self.main.update()
         record.decompress_customdata()
+
+        item = self.record_to_item[record]
+        self.tree.item(item,image=self.ico_record_cd_loaded)
+        #tags=self.RECORD
 
     @block_actions_processing
     @gui_block
@@ -3557,13 +3575,15 @@ def open_item(self,item=None):
        #     print(record.find_results[0])
        #except Exception as e:
        #     print('totu:',e)
+
+        self_item_to_data = self.item_to_data
 
-
-        if tree.tag_has(self.RECORD,item):
+        if tree.tag_has(self.RECORD_RAW,item):
             self.access_filestructure(record)
-            self.item_to_data[item] = record.filestructure
+            self_item_to_data[item] = record.filestructure
+            self.tree.item(item,tags=self.RECORD, image=self.ico_record_cd if record.has_cd() else self.ico_record)
 
-        top_data_tuple = self.item_to_data[item]
+        top_data_tuple = self_item_to_data[item]
 
         (top_entry_name_nr,top_code,top_size,top_mtime) = top_data_tuple[0:4]
@@ -3609,16 +3629,9 @@ def open_item(self,item=None):
                     tags=''
                     if record.find_results:
                         for find_result in record.find_results:
-                            #print(f'compare:{find_result[0]} vs {entry_subpath_tuple}')
                             if find_result[0]==entry_subpath_tuple:
-                                #print(' czad!')
                                 tags=self.FOUND
                                 break
-                            #else:
-                            #    print('lipa')
-
-                    #self.tree.item(current_item,tags=self.FOUND)
-                    #items_names_tuple,res_size,res_mtime=record.find_results[self.find_result_index]
 
                     #('data','record','opened','path','size','size_h','ctime','ctime_h','kind')
                     values = (entry_name,'','0',entry_name,size,bytes_to_str(size),mtime,strftime('%Y/%m/%d %H:%M:%S',localtime(mtime)),kind)
@@ -3629,7 +3642,7 @@
             tree_insert = tree.insert
             for (sort_index,values,entry_name,image,sub_dictionary_bool),(has_files,tags,data_tuple) in sorted(new_items_values.items(),key = lambda x : x[0][0],reverse=reverse) :
                 new_item=tree_insert(item,'end',iid=None,values=values,open=False,text=entry_name,image=image,tags=tags)
-                self.item_to_data[new_item] = data_tuple
+                self_item_to_data[new_item] = data_tuple
                 if sub_dictionary_bool:
                     tree_insert(new_item,'end') #dummy_sub_item
             #if to_the_bottom:
@@ -3648,7 +3661,7 @@ def single_record_show(self,record):
 
         #('data','record','opened','path','size','size_h','ctime','ctime_h','kind')
         values = (record.header.label,record.header.label,0,record.header.scan_path,size,bytes_to_str(size),record.header.creation_time,strftime('%Y/%m/%d %H:%M:%S',localtime(record.header.creation_time)),self.RECORD)
-        record_item=self.tree.insert('','end',iid=None,values=values,open=False,text=record.header.label,image=self.ico_record,tags=self.RECORD)
+        record_item=self.tree.insert('','end',iid=None,values=values,open=False,text=record.header.label,image=self.ico_record_raw_cd if record.has_cd() else self.ico_record_raw,tags=self.RECORD_RAW)
         self.tree.insert(record_item,'end',text='dummy') #dummy_sub_item
 
         self.tree_sort_item(None)
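Editor's note: the save()/load() rework in core.py above pickles each stream (header, filestructure, filenames, customdata), compresses it with zstandard and writes it as a separate zip member, while the header records (compressed, serialized, original) sizes via pympler's asizeof - the same triples that prepare_info() now prints next to the new "items"/"references" counters. The sketch below is a minimal, hypothetical illustration of that container layout, not librer's actual classes or API; save_record() and load_header_only() and the plain-dict header are illustrative names only.

    # Minimal sketch of the record container format (assumed layout, not librer's code).
    from pickle import dumps, loads
    from zipfile import ZipFile
    from pympler.asizeof import asizeof
    from zstandard import ZstdCompressor, ZstdDecompressor

    DATA_FORMAT_VERSION = '1.0012'   # matches the version bump in this patch

    def save_record(file_path, header, filestructure, filenames, customdata, compression_level=16):
        # header is a plain dict here; librer uses a Header object instead
        header.setdefault('data_format_version', DATA_FORMAT_VERSION)
        header.setdefault('zipinfo', {})
        compress = ZstdCompressor(level=compression_level, threads=-1).compress
        with ZipFile(file_path, 'w') as zip_file:
            for name, obj in (('filestructure', filestructure),
                              ('filenames', filenames),
                              ('customdata', customdata)):
                if name == 'customdata' and not obj:
                    header['zipinfo'][name] = (0, 0, 0)   # nothing stored, sizes zeroed
                    continue
                serialized = dumps(obj)
                compressed = compress(serialized)
                # (compressed, serialized, original) - the triple prepare_info() reports
                header['zipinfo'][name] = (asizeof(compressed), asizeof(serialized), asizeof(obj))
                zip_file.writestr(name, compressed)
            # the header is written last so it carries the size bookkeeping of the other streams
            zip_file.writestr('header', compress(dumps(header)))

    def load_header_only(file_path):
        # Mirrors the reworked load(): return (header, error_message) instead of a bare
        # boolean, so callers can collect readable loading errors for the user.
        try:
            with ZipFile(file_path, 'r') as zip_file:
                header = loads(ZstdDecompressor().decompress(zip_file.read('header')))
        except Exception as e:
            return None, f'loading "{file_path}" error: "{e}"'
        if header.get('data_format_version') != DATA_FORMAT_VERSION:
            return None, (f'loading "{file_path}" error: incompatible data format version: '
                          f'{header.get("data_format_version")} vs {DATA_FORMAT_VERSION}')
        return header, None

Because only the header is decompressed up front, the GUI can show a record in its "raw" state (the new record_raw / record_raw_cd icons) and switch to record / record_cd / record_cd_loaded only after decompress_filestructure() or decompress_customdata() has actually run.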
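Editor's note: on the GUI side, threaded_read_records() now receives a load_errors list and the loader returns an error message rather than True, so every record that fails to load is skipped but still reported once the loading thread has finished. A small sketch of that pattern, reusing the hypothetical load_header_only() from the sketch above; the thread handling and the example file path are placeholders, not librer's progress-dialog loop.

    from threading import Thread

    def threaded_read_records(record_files, load_errors):
        # Failed records are skipped, but their error messages are kept for the user.
        loaded = []
        for path in record_files:
            header, error = load_header_only(path)
            if error:
                load_errors.append(error)
            else:
                loaded.append((path, header))
        return loaded

    load_errors = []
    reader = Thread(target=lambda: threaded_read_records(['records/example.dat'], load_errors), daemon=True)
    reader.start()
    reader.join()   # the real GUI polls read_thread.is_alive() and updates a progress dialog instead
    if load_errors:
        print('Loading errors:\n\n' + '\n\n'.join(load_errors))

Passing a plain list into the worker is enough here: list.append is atomic under CPython's GIL, and the GUI only reads load_errors after the loading loop has ended, which is exactly how librer.py shows the "Loading errors" dialog in this patch.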