-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathstorage.py
410 lines (315 loc) · 13.7 KB
/
storage.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
# lshash/storage.py
# Copyright 2012 Kay Zhu (a.k.a He Zhu) and contributors (see CONTRIBUTORS.txt)
#
# This module is part of lshash and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import json
import numpy as np
import time
import struct
import fastdict
from bitarray import bitarray
try:
import redis
except ImportError:
redis = None
__all__ = ['storage']
def storage(storage_config, index):
    """ Given the configuration for storage and the index, return the
    configured storage instance.

    `storage_config` is a dict keyed by backend name ('dict', 'random' or
    'redis') whose value is that backend's own configuration dict.
    `index` is the hash-table index; for Redis it is used as the db number.

    Raises ValueError when no supported backend key is present.
    """
    if 'dict' in storage_config:
        return InMemoryStorage(storage_config['dict'])
    elif 'random' in storage_config:
        return RandomInMemoryStorage(storage_config['random'])
    elif 'redis' in storage_config:
        # NOTE: mutates the caller's config dict to pin the db number.
        storage_config['redis']['db'] = index
        return RedisStorage(storage_config['redis'])
    else:
        # Fixed: the previous message omitted the 'random' backend even
        # though the branch above handles it.
        raise ValueError("Only in-memory dictionary, random in-memory "
                         "dictionary and Redis are supported.")
class BaseStorage(object):
    """ Interface definition for storage adapters.

    Concrete backends must override every method below; the base class
    only documents the contract and raises NotImplementedError.
    """

    def __init__(self, config):
        """ Abstract constructor; adapters receive a backend-specific
        configuration dict. """
        raise NotImplementedError

    def keys(self):
        """ Return the binary-hash keys currently present in storage. """
        raise NotImplementedError

    def set_val(self, key, val):
        """ Store the string `val` under `key`, replacing any prior value. """
        raise NotImplementedError

    def get_val(self, key):
        """ Fetch the string value stored under `key`. """
        raise NotImplementedError

    def append_val(self, key, val):
        """ Append `val` to the list kept at `key`, creating a fresh list
        holding `val` when the key is absent. """
        raise NotImplementedError

    def get_list(self, key):
        """ Return the list stored at `key`; an empty list when the key is
        unknown or its list is empty. """
        raise NotImplementedError
class InMemoryStorage(BaseStorage):
    """ Storage adapter backed by a plain Python dict. """

    def __init__(self, config):
        # `config` is accepted for interface parity; this backend has no
        # options of its own.
        self.name = 'dict'
        self.storage = {}

    def keys(self):
        """ All hash keys currently stored. """
        return self.storage.keys()

    def items(self):
        """ (key, value) pairs for every stored entry. """
        return self.storage.items()

    def set_val(self, key, val):
        """ Overwrite the value kept at `key`. """
        self.storage[key] = val

    def get_val(self, key):
        """ Look up `key`; raises KeyError when it is absent. """
        return self.storage[key]

    def append_val(self, key, val):
        """ Append `val` to the bucket at `key`, creating it on demand. """
        self.storage.setdefault(key, []).append(val)

    def get_list(self, key):
        """ The bucket at `key`, or an empty list for an unknown key. """
        return self.storage.get(key, [])
class RandomInMemoryStorage(InMemoryStorage):
    """ In-memory storage built on a compressed `fastdict` C-extension map.

    Full 64-bit binary codes are bucketed under 32-bit "actual keys" formed
    by sub-sampling `config['r']` bit positions of the code; buckets whose
    sub-sampled keys lie within a small Hamming distance can then be probed
    together for approximate lookups.

    Expected `config` keys (inferred from use below -- confirm against
    callers): 't' (stored value type: 'string', 'int8' or 'int32'),
    'r' (number of sub-sampled bit positions), 'dim' (bit width of a full
    code) and 'random' (whether sampled positions are chosen at random).
    """

    def __init__(self, config):
        self.name = 'random'
        # Pick the fastdict specialisation matching the stored value type.
        # `load_dict` is a scratch dict of the same type used by load() to
        # merge a saved dict into an already-populated `storage`.
        if config['t'] == 'string':
            self.storage = fastdict.FastCompressUInt32StringDict(config['r'])
            self.load_dict = fastdict.FastCompressUInt32StringDict(config['r'])
        elif config['t'] == 'int8':
            self.storage = fastdict.FastCompressUInt32Int8Dict(config['r'])
            self.load_dict = fastdict.FastCompressUInt32Int8Dict(config['r'])
        elif config['t'] == 'int32':
            self.storage = fastdict.FastCompressUInt32IntDict(config['r'])
            self.load_dict = fastdict.FastCompressUInt32IntDict(config['r'])
        # NOTE(review): an unrecognised config['t'] falls through silently,
        # leaving self.storage unset; later calls would raise AttributeError.
        self.init_key_dimension(config['r'], config['dim'], config['random'])
        self.init_bases(config['r'])
        self.config = config
        # Lazily-initialised runtime-dict flags; see init_runtime*().
        self.inited_runtime = False
        self.inited_runtime_VLQ_base64 = False

    def init_key_dimension(self, num_of_r, dim, random = True):
        """ Choose the `num_of_r` bit positions (out of `dim`) used to build
        the sub-sampled bucket key: random without replacement, or simply
        the first `num_of_r` positions. """
        if random:
            self.key_dimensions = np.sort(np.random.choice(dim, num_of_r, replace = False))
        else:
            self.key_dimensions = np.sort(range(0, num_of_r))
        print "key dimensions:"
        print self.key_dimensions
        # Persist the chosen positions inside the fastdict itself so load()
        # can restore them later via get_keydimensions().
        self.storage.set_keydimensions(self.key_dimensions.tolist())

    def init_bases(self, num_of_r):
        # bases[i] == 1 << i; XOR-ing one base into a bucket key flips one
        # sub-sampled bit, yielding a Hamming-distance-1 neighbour key.
        self.bases = np.left_shift(1, range(0, num_of_r))

    def neighbor_keys(self, key):
        """ All bucket keys at Hamming distance 1 from `key`'s bucket. """
        actual_key = self.actual_key(key)
        return np.bitwise_xor(actual_key, self.bases)

    def actual_key(self, key):
        """ Map a full binary code (rendered as 64 bits) to its 32-bit
        bucket key: the bits at `self.key_dimensions`, left-padded with
        zeros to 32 bits. """
        key_binary = np.binary_repr(key, width = 64)
        bits = bitarray(key_binary)
        actual_key_bits = []
        # Zero padding so that r sampled bits always produce a 32-bit word.
        for empty_dim in range(0, 32 - self.config['r']):
            actual_key_bits.append(False)
        for dim in self.key_dimensions:
            actual_key_bits.append(bits[dim])
        #actual_key_bits = np.zeros(32 - len(actual_key_bits)).astype(np.int).tolist() + actual_key_bits
        actual_key_binary = bitarray(actual_key_bits, endian='big')
        # Decode the assembled bits as a big-endian unsigned 32-bit integer.
        string = struct.unpack(">I", actual_key_binary.tobytes())[0]
        actual_key = np.array([string]).astype(np.uint32)[0]
        return actual_key

    def set_val(self, key, val):
        """ Store `val` in `key`'s bucket, keeping the full code `key`
        alongside it. """
        actual_key = self.actual_key(key)
        self.storage.set(int(actual_key), long(key), val)

    def get_val(self, key):
        """ Return the contents of `key`'s sub-sampled bucket. """
        actual_key = self.actual_key(key)
        return self.storage.get(int(actual_key))

    def benchmark_begin(self, title):
        # Simple wall-clock timing helper; paired with benchmark_end().
        print "start to " + title
        self.start = time.clock()

    def benchmark_end(self, title):
        print "end of " + title
        elapsed = (time.clock() - self.start)
        print "time: " + str(elapsed)

    def append_val(self, key, val):
        """ Append (full code, val) to the bucket of `key`'s sub-sampled
        key. """
        actual_key = self.actual_key(key)
        #print "actual_key: " + str(actual_key)
        #print "key: " + str(key)
        #print "val: " + str(val)
        self.storage.append(int(actual_key), long(key), int(val))

    def batch_append_vals(self, keys, val):
        """ Append consecutive values starting at `val` for each key in
        `keys`, in a single fastdict batch call.

        `val` is incremented per key (presumably a running image-id counter
        -- confirm against callers).
        """
        vals = []
        actual_keys = []
        for key in keys:
            actual_keys.append(int(self.actual_key(key)))
            vals.append(val)
            # NOTE(review): indentation reconstructed from a flattened
            # source -- reading is that int8 ids stop incrementing at 255
            # (8-bit limit) while other types always increment; confirm.
            if self.config['t'] == 'int8':
                if val < 255:
                    val += 1
            else:
                val += 1
        if self.config['t'] == 'string':
            # String dicts store VLQ-base64-encoded ids rather than raw ints.
            encodeds = self.storage.NumberIdsToVLQ_base64(vals)
            vals = []
            for encoded in encodeds:
                vals.append(encoded)
        self.benchmark_begin('batch insert to fastdict')
        self.storage.fast_batch_append(actual_keys, keys, vals)
        self.benchmark_end('batch insert to fastdict')

    def get_list(self, key, filter_code):
        """ Values in `key`'s bucket whose stored full code equals
        `filter_code` (string-compared exact match). Note the extra
        `filter_code` argument relative to the base-class signature. """
        actual_key = self.actual_key(key)
        vals = []
        if self.storage.exist(int(actual_key)):
            for key_value in self.storage.get(int(actual_key)):
                if str(filter_code) == str(key_value.first):
                    vals.append(key_value.second)
        return vals

    def expand_key(self, actual_key, level = 1):
        """ Unique bucket keys reachable from `actual_key` by flipping up
        to `level` sub-sampled bits (recursive neighbour expansion). """
        expanded_keys = np.bitwise_xor(actual_key, self.bases)
        if level > 1:
            neighbor_keys = expanded_keys
            for neighbor_key in neighbor_keys:
                expanded_keys = np.append(expanded_keys, self.expand_key(neighbor_key, level - 1))
        return np.unique(np.array(expanded_keys))

    # given sub-sampled key, return all expanded sub-sampled keys
    def actual_keys(self, reference_key, level = 1):
        """ Bucket key of `reference_key` plus, when level > 0, all bucket
        keys within Hamming distance `level`, as a unique uint32 array. """
        actual_key = self.actual_key(reference_key)
        if level > 0:
            neighbor_keys = self.expand_key(actual_key, level)
            all_keys = np.unique(np.append(neighbor_keys, actual_key)).astype(np.uint32)
        else:
            all_keys = np.array([actual_key]).astype(np.uint32)
        return all_keys

    # given sub-sampled key, retrieve all binary codes in corresponding buckets
    def keys(self, reference_key, level = 1):
        """ Return (binary-code strings, image ids) found in every bucket
        within `level` of `reference_key`'s bucket.

        NOTE(review): overrides InMemoryStorage.keys() with a different
        signature and return shape.
        """
        all_keys = self.actual_keys(reference_key, level)
        keys = []
        image_ids = []
        for key_value in self.storage.mget(all_keys.tolist()):
            keys.append(str(key_value.first))
            image_ids.append(key_value.second)
        return (keys, image_ids)

    def get_neighbor_vals(self, key):
        """ Bucket contents of every Hamming-distance-1 neighbour bucket. """
        neighbor_keys = self.neighbor_keys(key)
        vals = []
        for neighbor_key in neighbor_keys:
            vals.append(self.storage.get(int(neighbor_key)))
        return np.array(vals)

    def init_runtime(self):
        """ One-time conversion of the dict into its runtime python form;
        only valid when the fastdict reports status 0. """
        if not self.inited_runtime:
            print "init rumtime dict..."
            if self.storage.get_dict_status() == 0:
                self.storage.init_runtime_python_dict()
            else:
                print "Incorrect dict mode."
            print "done."
            self.inited_runtime = True

    def init_runtime_vlq_base64(self):
        """ One-time conversion of the VLQ-base64 dict (status 1) into its
        runtime form. """
        if not self.inited_runtime_VLQ_base64:
            print "init rumtine VLQ base64 dict..."
            if self.storage.get_dict_status() == 1:
                self.storage.init_runtime_VLQ_base64_dict()
            else:
                print "Incorrect dict mode."
            print "done."
            self.inited_runtime_VLQ_base64 = True

    def save(self, filename):
        """ Serialise the fastdict to `filename` with the saver matching
        the configured value type. """
        if self.config['t'] == 'string':
            fastdict.save_compress_uint32_string(filename, self.storage)
        elif self.config['t'] == 'int8':
            fastdict.save_compress_uint32_int8(filename, self.storage)
        elif self.config['t'] == 'int32':
            fastdict.save_compress_uint32_int(filename, self.storage)

    def load(self, filename):
        """ Load a saved fastdict from `filename`.

        When `storage` already holds data, load into the scratch
        `load_dict` and merge; otherwise load straight into `storage`.
        Finally restore the persisted key dimensions.
        """
        if self.storage.size() > 0:
            if self.config['t'] == 'string':
                fastdict.load_compress_uint32_string(filename, self.load_dict)
            elif self.config['t'] == 'int8':
                fastdict.load_compress_uint32_int8(filename, self.load_dict)
            elif self.config['t'] == 'int32':
                fastdict.load_compress_uint32_int(filename, self.load_dict)
            self.storage.merge(self.load_dict)
            self.load_dict.clear()
        else:
            if self.config['t'] == 'string':
                fastdict.load_compress_uint32_string(filename, self.storage)
            elif self.config['t'] == 'int8':
                fastdict.load_compress_uint32_int8(filename, self.storage)
            elif self.config['t'] == 'int32':
                fastdict.load_compress_uint32_int(filename, self.storage)
        # Key dimensions were stored inside the dict by init_key_dimension().
        key_dimensions = []
        self.storage.get_keydimensions(key_dimensions)
        self.key_dimensions = np.array(key_dimensions)

    def compress(self):
        # Moves the dict from its initial status (-1) into indexed form.
        if self.storage.get_dict_status() == -1:
            self.storage.go_index()
        else:
            print "Incorrect dict mode."

    def to_VLQ_base64(self):
        # Converts a status-0 dict into its VLQ-base64 representation.
        if self.storage.get_dict_status() == 0:
            self.storage.to_VLQ_base64_dict()
        else:
            print "Incorrect dict mode."

    def uncompress_binary_codes(self, reference_key, level):
        """ Fetch and uncompress the binary codes of all buckets within
        `level` of `reference_key`'s bucket; returns None on an unexpected
        dict status. """
        binary_codes = None
        self.benchmark_begin('uncompressing binary codes')
        if self.storage.get_dict_status() == 0:
            print "non VLQ base64"
            binary_codes = self.storage.mget_binary_codes(self.actual_keys(reference_key, level).tolist())
        elif self.storage.get_dict_status() == 1:
            print "VLQ base64"
            binary_codes = self.storage.mget_VLQ_base64_binary_codes(self.actual_keys(reference_key, level).tolist())
        else:
            print "Incorrect dict mode."
        self.benchmark_end('uncompressing binary codes')
        return binary_codes

    def show_uncompressed_keys(self, cols_buffer):
        """ Debug helper: print each column buffer in 8-byte chunks along
        with the unsigned 64-bit integer each chunk decodes to. """
        index = 0
        for buffers in cols_buffer:
            print index
            for i in range(0, len(buffers) / 8):
                data = ''
                for j in range(i * 8, i * 8 + 8):
                    data = data + buffers[j]
                print data
                print struct.unpack('Q', data)
            index += 1

    # obtain compressed columns for binary codes to be uncompress with GPU
    def get_compressed_cols(self, reference_key, level = 0):
        """ Return (compressed column buffers, image ids) for all buckets
        within `level` of `reference_key`'s bucket, for decompression on
        the GPU. Requires a runtime dict (status 2 or 3); otherwise both
        elements of the result remain None. """
        #neighbor_keys = self.neighbor_keys(reference_key)
        #actual_key = self.actual_key(reference_key)
        #all_keys = np.unique(np.append(neighbor_keys, actual_key))
        self.benchmark_begin('load cols')
        #cols = self.storage.get_cols(int(actual_key))
        cols = None
        image_ids = None
        if self.storage.get_dict_status() == 2:
            print "compressed runtime dict"
            cols = self.storage.mget_python_cols_as_buffer(self.actual_keys(reference_key, level).tolist())
            image_ids = self.storage.mget_image_ids(self.actual_keys(reference_key, level).tolist())
        elif self.storage.get_dict_status() == 3:
            print "VLQ base64 compressed runtime dict"
            cols = self.storage.mget_VLQ_base64_cols_as_buffer(self.actual_keys(reference_key, level).tolist())
            image_ids = self.storage.mget_VLQ_base64_image_ids(self.actual_keys(reference_key, level).tolist())
        self.benchmark_end('load cols')
        #columns = [0] * len(cols.first)
        #self.benchmark_begin('cols to np array')
        #for column in cols.first:
        #for index in range(0, len(cols.first)):
        #    columns[index] = np.array(cols.first[index]).astype(np.uint64)
        #self.benchmark_end('cols to np array')
        #self.benchmark_begin('cols to np array')
        #np_columns = np.array(columns)
        #self.benchmark_end('cols to np array')
        return (cols, image_ids)

    def clear(self):
        """ Remove everything from the underlying fastdict. """
        self.storage.clear()
class RedisStorage(BaseStorage):
    """ Storage adapter backed by a Redis server. """

    def __init__(self, config):
        """ `config` is forwarded verbatim to redis.StrictRedis. """
        if redis is None:
            raise ImportError("redis-py is required to use Redis as storage.")
        self.name = 'redis'
        self.storage = redis.StrictRedis(**config)

    def keys(self, pattern="*"):
        """ Redis keys matching `pattern` (defaults to all keys). """
        return self.storage.keys(pattern)

    def set_val(self, key, val):
        """ Overwrite the value stored at `key`. """
        self.storage.set(key, val)

    def get_val(self, key):
        """ Fetch the value stored at `key`. """
        return self.storage.get(key)

    def append_val(self, key, val):
        # JSON-encode so non-string values round-trip through Redis lists.
        self.storage.rpush(key, json.dumps(val))

    def get_list(self, key):
        """ Entire Redis list stored at `key`. """
        return self.storage.lrange(key, 0, -1)