@@ -50,7 +50,6 @@
 import keyboard
 from ruamel.yaml import YAML
 import logging
-from io import StringIO as StringBuffer
 from datetime import datetime
 from cerberus import Validator
 import warnings
@@ -95,15 +94,15 @@
 WS_MINIMIZEBOX = 131072
 WS_MAXIMIZEBOX = 65536
 
-log_capture_string = StringBuffer()
+log_capture_string = StringIO()
 logging.basicConfig(level=logging.INFO,
                     format='%(asctime)s, %(levelname)s, %(message)s',
                     datefmt='%Y-%m-%d %H:%M:%S',
                     handlers=[logging.StreamHandler(log_capture_string)]
                     )
 
 __author__ = "Brian Maloney"
-__version__ = "2024.07.24"
+__version__ = "2024.09.20"
 __email__ = "bmmaloney97@gmail.com"
 rbin = []
 user_logs = {}
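
A note on the hunk above: the diff drops the `StringBuffer` alias and writes to `StringIO` directly (a plain `from io import StringIO` is assumed to exist elsewhere in the file). A minimal, self-contained sketch of this in-memory log-capture pattern, with illustrative names rather than the tool's own:

```python
import logging
from io import StringIO

# Keep log output in memory so the GUI can count and display messages later.
log_buffer = StringIO()
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s, %(levelname)s, %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    handlers=[logging.StreamHandler(log_buffer)])

logging.info('first message')
logging.error('second message')

# Same counting trick odl() uses below: one record per line, so the
# number of messages equals the number of newlines.
mcount = len(log_buffer.getvalue().split('\n')) - 1
print(mcount)  # 2
```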
@@ -1291,10 +1290,11 @@ def highlight_pattern(self, pattern, tag, start="1.0", end="end", regexp=False):
 
 class Result:
 
-    def __init__(self, master, *args, folder=True, tags=''):
+    def __init__(self, master, *args, folder=True, folderShared='', tags=''):
         self.master = master
         self.args = args
         self.folder = folder
+        self.folderShared = folderShared
         self.tags = tags
         self.type = []
         self.status = []
@@ -1360,7 +1360,7 @@ def handle_folder_status(self, num, values_list):
             ''
         )
 
-        if num == '7' and len(values_list) > 11:
+        if num == '7' and len(values_list) > 12:
             shortcut_item = next((item for item in self.args[0] if 'shortcutitemindex:' in item.lower()), None)
             if shortcut_item and int(shortcut_item.split(' ')[1]) > 0:
                 self.type.clear()
@@ -1372,6 +1372,14 @@ def handle_folder_status(self, num, values_list):
         else:
             self.type.append(self.get_type_image(num))
 
+        sharedItem = next(
+            (item.split(' ')[1] for item in self.args[0] if 'shareditem:' in item.lower() and len(item.split(' ')) > 1),
+            ''
+        )
+
+        if sharedItem == '1':
+            self.status.append(shared_big_img)
+
         if not set(self.lock_list).intersection(spoPermissions):
             if num not in ('10', '11'):
                 self.status.append(locked_big_img)
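
The new `sharedItem` lookup above follows the extraction idiom used throughout this file: tree items carry `'key: value'` strings, and `next()` over a generator returns the first matching value, with `''` as the fallback. A standalone sketch, using made-up sample values of that shape:

```python
# Values shaped like the treeview's 'key: value' strings (sample data).
values = ['resourceID: ABC123+1', 'sharedItem: 1', 'spoPermissions: []']

# Take the token after the space in the first 'shareditem:' entry, '' if absent.
sharedItem = next(
    (item.split(' ')[1] for item in values
     if 'shareditem:' in item.lower() and len(item.split(' ')) > 1),
    ''
)
print(sharedItem)  # '1' -> the shared-status icon gets appended
```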
@@ -1397,7 +1405,7 @@ def process_non_folder_status(self, values_list):
         else:
             self.status.append(self.get_status_image(num))
 
-        if sharedItem == '1':
+        if sharedItem == '1' or self.folderShared == '1':
             self.status.append(shared_big_img)
 
         if not set(self.lock_list).intersection(spoPermissions) and not any('inrecyclebin:' in item.lower() for item in self.args[0]):
@@ -2086,7 +2094,7 @@ def get_info(self, event): # need to look into click performance
         # find logs for files/folders
         if any('status:' in value.lower() for value in values):
             # Find the item containing 'resourceID:' and extract the desired part
-            resourceID = next((value.split(" ")[1].split("+")[0] for value in values if 'resourceid:' in value.lower()), '')
+            resourceID = next((value.split(" ")[1].split("+")[0] for value in values if value.lower().startswith('resourceid:')), '')
             # Concatenate DataFrames containing the resource_id
             info = pd.concat([df.loc[df.Params.astype('string').str.contains(f'{resourceID}', case=False, na=False)] for df in df_list])
 
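Switching the filter from `'resourceid:' in value.lower()` to `value.lower().startswith('resourceid:')` tightens the match: a substring test also hits any field whose name merely contains `resourceid:`. A quick illustration with a hypothetical `parentResourceID` field:

```python
values = ['parentResourceID: AAA111+0', 'resourceID: BBB222+5']

# Substring match grabs the first (wrong) entry:
loose = next((v.split(' ')[1].split('+')[0] for v in values
              if 'resourceid:' in v.lower()), '')
# Prefix match only accepts the field that actually starts with the key:
strict = next((v.split(' ')[1].split('+')[0] for v in values
               if v.lower().startswith('resourceid:')), '')

print(loose)   # AAA111 (wrong record)
print(strict)  # BBB222
```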
@@ -2182,6 +2190,14 @@ def file_pane(self):
         else:
             self.status.append(image_mapping.get(folderStatus, online_img))
 
+        sharedItemF = next(
+            (item.split(' ')[1] for item in values if 'shareditem:' in item.lower() and len(item.split(' ')) > 1),
+            ''
+        )
+
+        if sharedItemF == '1':
+            self.status.append(shared_img)
+
         if not set(lock_list).intersection(spoPermissions) and str(tags) != 'red':
             if folderStatus not in ('10', '11', ''):
                 self.status.append(locked_img)
@@ -2193,6 +2209,11 @@ def file_pane(self):
 
         try:
             if cur_item[0] in file_items:
+                folderShared = next(
+                    (item.split(' ')[1] for item in self.tv.item(cur_item[0])["values"] if 'shareditem:' in item.lower() and len(item.split(' ')) > 1),
+                    ''
+                )
+
                 for i in file_items[cur_item[0]]:
                     self.status.clear()
                     item_data_i = self.tv.item(i)
@@ -2229,7 +2250,7 @@ def file_pane(self):
 
                     self.status.append(image_mapping.get(fileStatus, online_img))
 
-                    if sharedItem == '1':
+                    if sharedItem == '1' or folderShared == '1':
                         self.status.append(shared_img)
 
                     if not set(lock_list).intersection(spoPermissions_i) and str(tags_i) != 'red':
@@ -3059,14 +3080,18 @@ def search(item=''):
             image_key = tv.item(child, 'image')[0]
             Result(root, values, child, image_key)
         if child in file_items:
+            folderShared = next(
+                (item.split(' ')[1] for item in tv.item(child, 'values') if 'shareditem:' in item.lower() and len(item.split(' ')) > 1),
+                ''
+            )
             for i in file_items[child]:
                 if query.lower() in str(tv.item(i, 'values')).lower():
                     tags = ''
                     if tv.item(i, 'tags'):
                         tags = 'red'
                     values = tv.item(i, 'values')
                     image_key = tv.item(i, 'image')[0]
-                    Result(root, values, i, image_key, folder=False, tags=tags)
+                    Result(root, values, i, image_key, folder=False, folderShared=folderShared, tags=tags)
         search(item=child)
 
 
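In `search()`, the parent folder's `sharedItem` flag is now resolved once per folder and passed to each child's `Result` as `folderShared`, so files inherit the shared badge from their parent. A reduced sketch of that inheritance, with a stand-in for `Result`:

```python
def shows_shared_badge(child_values, folderShared):
    # Stand-in for Result's logic: badge if the item is shared itself
    # or its parent folder is shared.
    sharedItem = next(
        (v.split(' ')[1] for v in child_values
         if 'shareditem:' in v.lower() and len(v.split(' ')) > 1),
        ''
    )
    return sharedItem == '1' or folderShared == '1'

folder_values = ['sharedItem: 1']  # sample parent-folder values
folderShared = next(
    (v.split(' ')[1] for v in folder_values
     if 'shareditem:' in v.lower() and len(v.split(' ')) > 1),
    ''
)
print(shows_shared_badge(['sharedItem: 0'], folderShared))  # True, inherited
```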
@@ -3532,16 +3557,11 @@ def odl(folder_name, csv=False):
     file_manager.tv2.delete(*file_manager.tv2.get_children())
     file_manager.tv3.delete(*file_manager.tv3.get_children())
     key_find = re.compile(r'Users/(?P<user>.*)?/AppData')
-    if csv:
-        key = folder_name.name.split('/')[-1].split('_')[0]
-    else:
-        key = re.findall(key_find, folder_name)
-        if len(key) == 0:
-            key = 'ODL'
-        else:
-            key = key[0]
     pb.stop()
+    start = time.time()
+
     if csv:
+        key = folder_name.name.split('/')[-1].split('_')[0]
         header_list = ['Filename',
                        'File_Index',
                        'Timestamp',
@@ -3576,6 +3596,11 @@ def odl(folder_name, csv=False):
             odl = pd.DataFrame()
             logging.error(f'{folder_name.name} not a valid ODL csv.')
     else:
+        key = re.findall(key_find, folder_name)
+        if len(key) == 0:
+            key = 'ODL'
+        else:
+            key = key[0]
         odl = parse_odl(folder_name, key, pb, value_label, gui=True)
 
     tb = ttk.Frame()
@@ -3606,7 +3631,7 @@ def odl(folder_name, csv=False):
 
     pb.stop()
     pb.configure(mode='determinate')
-    value_label['text'] = " Parsing complete"
+    value_label['text'] = f' Parsing complete. {format((time.time() - start), ".4f")} seconds'
 
     mcount = (len(log_capture_string.getvalue().split('\n')) - 1)
     message['text'] = mcount
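
The completion label now reports elapsed wall-clock time: `start` is captured right after `pb.stop()` earlier in `odl()`, and the delta is formatted to four decimal places (this assumes the module already imports `time`, which the added call implies). The pattern in isolation:

```python
import time

start = time.time()
# ... parsing work happens here ...
label_text = f' Parsing complete. {format((time.time() - start), ".4f")} seconds'
print(label_text)  # e.g. " Parsing complete. 0.0312 seconds"
```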
@@ -3656,7 +3681,7 @@ def start_parsing(x, filename=False, reghive=False, recbin=False, live=False):
         account = os.path.dirname(filename.replace('/', '\\')).rsplit('\\', 1)[-1]
         name = os.path.split(filename)[1]
 
-        df, rbin_df, df_scope, scopeID = DATParser.parse_dat(filename, account,
+        df, rbin_df, df_scope, scopeID, localHashAlgorithm = DATParser.parse_dat(filename, account,
                                                              gui=True, pb=pb,
                                                              value_label=value_label)
 
@@ -3668,13 +3693,14 @@ def start_parsing(x, filename=False, reghive=False, recbin=False, live=False):
                                                        rbin_df, account,
                                                        reghive,
                                                        recbin,
+                                                       localHashAlgorithm=localHashAlgorithm,
                                                        gui=True,
                                                        pb=pb,
                                                        value_label=value_label)
 
         dat = True
 
-    if x == 'Load from SQLite':
+    elif x == 'Load from SQLite':
         filename = filename.replace('/', '\\')
         sql_dir = re.compile(r'\\Users\\(?P<user>.*?)\\AppData\\Local\\Microsoft\\OneDrive\\settings\\(?P<account>.*?)$')
         sql_find = re.findall(sql_dir, filename)
@@ -3686,7 +3712,7 @@ def start_parsing(x, filename=False, reghive=False, recbin=False, live=False):
         pb.configure(mode='indeterminate')
         value_label['text'] = 'Building folder list. Please wait....'
         pb.start()
-        df, rbin_df, df_scope, df_GraphMetadata_Records, scopeID, account = SQLiteParser.parse_sql(filename)
+        df, rbin_df, df_scope, df_GraphMetadata_Records, scopeID, account, localHashAlgorithm = SQLiteParser.parse_sql(filename)
 
         if not df.empty:
             cache, rbin_df = OneDriveParser.parse_onedrive(df,
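
`DATParser.parse_dat` and `SQLiteParser.parse_sql` now return an extra `localHashAlgorithm` value that is threaded through to `OneDriveParser.parse_onedrive` as a keyword argument (the CSV path below pins it to `False`, having no hash metadata). A sketch of the shape of that plumbing, with simplified stand-in signatures rather than the parsers' real ones:

```python
def parse_sql(filename):
    # Stand-in: the real parser also returns dataframes; only the extra
    # trailing value matters for this sketch.
    return 'df', 'rbin_df', 'df_scope', 'records', 'scopeID', 'account', 'SHA1'

def parse_onedrive(df, account, localHashAlgorithm=False):
    # Downstream code can branch on the algorithm when decoding file hashes.
    return f'{account}: hashes decoded with {localHashAlgorithm or "no algorithm"}'

df, rbin, scope, records, scopeID, account, localHashAlgorithm = parse_sql('x.db')
print(parse_onedrive(df, account, localHashAlgorithm=localHashAlgorithm))
```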
@@ -3698,18 +3724,19 @@ def start_parsing(x, filename=False, reghive=False, recbin=False, live=False):
                                                            account,
                                                            reghive,
                                                            recbin,
+                                                           localHashAlgorithm=localHashAlgorithm,
                                                            gui=True,
                                                            pb=pb,
                                                            value_label=value_label)
             pb.stop()
             dat = True
 
-    if x == 'Import JSON':
+    elif x == 'Import JSON':
         cache = json.load(filename)
         df = pd.DataFrame()
         rbin_df = pd.DataFrame()
 
-    if x == 'Import CSV':
+    elif x == 'Import CSV':
         account = ''
         df, rbin_df, df_scope, df_GraphMetadata_Records, scopeID = parse_csv(filename)
 
@@ -3722,11 +3749,12 @@ def start_parsing(x, filename=False, reghive=False, recbin=False, live=False):
                                                        account,
                                                        reghive,
                                                        recbin,
+                                                       localHashAlgorithm=False,
                                                        gui=True,
                                                        pb=pb,
                                                        value_label=value_label)
 
-    if x == 'Project':
+    elif x == 'Project':
         name = filename
         pass
 
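Converting the repeated `if x == ...` tests to an `elif` chain makes the input-type branches mutually exclusive: once one matches, the rest are skipped rather than re-tested against state an earlier branch may have mutated. The control flow, reduced to a toy dispatcher using labels visible in the diff:

```python
def dispatch(x):
    # Only the first matching branch runs; later tests are skipped entirely.
    if x == 'Load from SQLite':
        return 'sqlite'
    elif x == 'Import JSON':
        return 'json'
    elif x == 'Import CSV':
        return 'csv'
    elif x == 'Project':
        return 'project'
    return 'unhandled'

print(dispatch('Import JSON'))  # json
```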
@@ -3741,11 +3769,10 @@ def start_parsing(x, filename=False, reghive=False, recbin=False, live=False):
     if x == 'Import JSON':
         parent_child(cache, None, True)
         df_GraphMetadata_Records = pd.DataFrame(dfs_to_concat)
-    else:
-        parent_child(cache)
-    if x == 'Import JSON':
         curItem = tv.get_children()[-1]
         file_count, del_count, folder_count = json_count(item=curItem)
+    else:
+        parent_child(cache)
 
     pb.stop()
     pb.configure(mode='determinate')