Reputation: 71
I am trying to create an application where products arrive every second and update a database of fixed size (~300 MB) using an LRU eviction policy. Although I get no exception when adding new products or deleting them from the database, it seems that Chrome never deletes the .ldb and .bak files. As a result, I consume several gigabytes of disk space and always hit the quota limit. The same code works perfectly in Firefox. Can someone explain what I am doing wrong? The code is below.
startExperiment(300 * 1024 * 1024);

/**
 * Drives a fixed-size IndexedDB cache experiment: every second a random
 * 2-10 MB product is generated and queued, and every 20 seconds the queue
 * is flushed, evicting the oldest products (by timestamp) first whenever
 * the configured byte budget would be exceeded. Read/write/delete timings
 * are accumulated in `stats` and reported every 5 seconds.
 *
 * @param lrusize maximum total payload size of the database, in bytes
 */
function startExperiment(lrusize:number) {
    var j = 0;
    var productsToInsert = new HashMap<number, Product>();
    // Start from a clean slate so repeated runs measure the same thing.
    window.indexedDB.deleteDatabase("ExampleDatabase");
    var versionNumber = 1;
    var stats = new Stats();
    var sizeOfDatabase = 0;
    var db = new ProductDatabase('ExampleDatabase', versionNumber, () => {
        db.getSizeOfDatabase((result) => {
            // Tolerate both callback shapes: a plain byte count, or the raw
            // {sizeId, bytelength} record an unpatched ProductDatabase passes.
            if (result == null) {
                sizeOfDatabase = 0;
            } else if (typeof result === "number") {
                sizeOfDatabase = result;
            } else {
                sizeOfDatabase = (<any>result).bytelength || 0;
            }
        });
    });

    /** Builds a 2-10 MB Uint8Array filled with random byte values in [1, 100]. */
    function randomizeArray() {
        var numOfMBS = Math.floor((Math.random() * (10 - 2) + 2) * 1024 * 1024);
        var bytearray = new Uint8Array(numOfMBS.valueOf());
        for (var i = 0; i < bytearray.length; i++) {
            bytearray[i] = Math.random() * (100 - 1) + 1;
        }
        return bytearray;
    }

    // Progress report every 5 seconds.
    setInterval(function () {
        var readAverage = stats.getReadTimesAverage();
        var writeAverage = stats.getWriteTimesAverage();
        var deleteAverage = stats.getDeleteTimesAverage();
        console.log("Num of insertions : " + j + " | Read average : " + readAverage + " | Write average : " + writeAverage + " | Delete average : " + deleteAverage);
    }, 5000);

    // Produce one product per second; queue it only if it is not already stored.
    setInterval(function () {
        var bytearray = randomizeArray();
        var identifier = j++;
        var timestamp = Date.now();
        db.getProduct(identifier, (product) => {
            if (product == null) {
                var newProduct = new Product(identifier, timestamp, 0, bytearray);
                var size = memorySizeOf(newProduct);
                newProduct.sizeInBytes = size;
                productsToInsert.set(identifier, newProduct);
            }
        });
    }, 1000);

    /**
     * Flushes the pending queue. When the new total would exceed the budget,
     * the oldest products are evicted first, then the queue is inserted.
     */
    function updateLRU() {
        var tmpList:Product[] = [];
        var keys = productsToInsert.keys();
        var currentBytesToBeInserted = 0;
        for (var i = 0; i < keys.length; i++) {
            var product = productsToInsert.get(keys[i]);
            tmpList.push(product);
            currentBytesToBeInserted += product.sizeInBytes;
        }
        var currentSize = sizeOfDatabase + currentBytesToBeInserted;
        if (currentSize > lrusize) {
            var bytesToRemove = currentSize - lrusize;
            db.deleteProducts(bytesToRemove, stats, () => {
                // NOTE(review): deleteProducts removes whole products and may
                // free slightly more than bytesToRemove; this accounting is
                // therefore an upper bound on the remaining size.
                sizeOfDatabase -= bytesToRemove;
                // BUG FIX: the original called addFragments(tmpList), which is
                // not defined anywhere — every eviction cycle threw a
                // ReferenceError and the queued products were never inserted.
                addProducts(tmpList);
            });
        } else {
            addProducts(tmpList);
        }
    }

    /**
     * Inserts queued products one at a time (recursing on write completion),
     * timing each write and persisting the new total size once drained.
     */
    function addProducts(tmpList:Product[]) {
        var product = tmpList[0];
        var startAddProductTs = Date.now();
        db.addProduct(product, () => {
            var stopAddProductTs = Date.now();
            stats.addWriteTimes(stopAddProductTs - startAddProductTs);
            sizeOfDatabase += product.sizeInBytes;
            tmpList.shift();
            productsToInsert.delete(product.productId);
            if(tmpList.length > 0) {
                addProducts(tmpList);
            } else {
                db.addDBSize(sizeOfDatabase, () => {
                });
            }
        });
    }

    // Flush the queue (and evict if needed) every 20 seconds.
    setInterval(function () {
        updateLRU();
    }, 20000);
}
/**
 * Callback-based wrapper around an IndexedDB database with two object stores:
 * 'products' (keyed by productId, indexed by timestamp for LRU eviction) and
 * 'dbsize' (a single record tracking the total payload size in bytes).
 * All failures are logged to the console.
 */
class ProductDatabase {
    private db;

    constructor(private name:string, private version:number, callback:() => void) {
        this.openDatabase(callback);
    }

    /** Opens (and if needed upgrades) the database; invokes callback once ready. */
    openDatabase(callback:() => void) {
        var openDatabaseRequest = window.indexedDB.open(this.name, this.version);
        // Arrow wrapper keeps `this` bound should upgrade() ever need instance state.
        openDatabaseRequest.onupgradeneeded = (event:any) => this.upgrade(event);
        openDatabaseRequest.onsuccess = () => {
            this.db = openDatabaseRequest.result;
            callback();
        }
        // BUG FIX: the original installed no error handler, so open failures
        // (e.g. quota exceeded, blocked upgrade) went completely silent.
        openDatabaseRequest.onerror = () => {
            console.error("Open database error : " + openDatabaseRequest.error);
        }
    }

    /** Schema setup: products store + timestamp index, plus the dbsize store. */
    upgrade(event:any) {
        var store = event.target.result.createObjectStore("products", {keyPath: 'productId'});
        // BUG FIX: timestamps come from Date.now() and can collide within the
        // same millisecond; a unique index would abort the insert transaction.
        store.createIndex('by_timestamp', "timestamp", {unique: false});
        event.target.result.createObjectStore("dbsize", {keyPath: 'sizeId'});
    }

    /** Fetches one product by id; callback receives undefined when absent. */
    getProduct(productId:number, callback:(result:Product) => void) {
        var productStore = this.db.transaction(["products"], "readonly").objectStore('products');
        var query = productStore.get(productId);
        query.onsuccess = () => {
            var product = query.result;
            callback(product);
        }
        query.onerror = () => {
            console.error("Read product error : " + query.error);
        }
    }

    /** Upserts the single size-tracking record; callback fires on commit. */
    addDBSize(dbSize:number, callback:() => void) {
        var transaction = this.db.transaction('dbsize', 'readwrite');
        var productStore = transaction.objectStore('dbsize');
        var newSize = {'sizeId': 1, 'bytelength': dbSize};
        var request = productStore.put(newSize);
        request.onerror = () => {
            console.log("Unsuccessful request with error : " + request.error);
        }
        transaction.oncomplete = () => {
            callback();
        }
        transaction.onerror = () => {
            console.error("Size transaction error : " + transaction.error);
        }
        transaction.onabort = () => {
            console.error("Size transaction aborted with error : " + transaction.error);
        }
    }

    /** Adds a batch of products in one transaction; callback fires on commit. */
    addCachedProducts(productList:Array<Product>, callback:() => void) {
        var transaction = this.db.transaction('products', 'readwrite');
        var productStore = transaction.objectStore('products');
        for (var i = 0; i < productList.length; i++) {
            productStore.add(productList[i]);
        }
        transaction.oncomplete = () => {
            callback();
        }
        transaction.onabort = () => {
            console.error("Batch add transaction aborted with error : " + transaction.error);
        }
    }

    /** Counts the products currently stored. */
    getNumberOfProducts(callback:(result:number) => void) {
        var productStore = this.db.transaction('products', 'readonly').objectStore('products');
        var query = productStore.count();
        query.onsuccess = () => {
            var result = query.result;
            callback(result);
        }
        query.onerror = () => {
            console.error("Read number of products error : " + query.error);
        }
    }

    /**
     * Reads the persisted database size. The stored record is
     * {sizeId, bytelength}; callers receive the numeric byte count,
     * or null when no size has been persisted yet.
     */
    getSizeOfDatabase(callback:(result:number) => void) {
        var productStore = this.db.transaction('dbsize', "readonly").objectStore('dbsize');
        var query = productStore.get(1);
        query.onsuccess = () => {
            var record = query.result;
            // BUG FIX: the original passed the whole record object to a
            // callback declared to receive a number, so the caller's size
            // accounting silently operated on {sizeId, bytelength}.
            callback(record == null ? null : record.bytelength);
        }
        query.onerror = () => {
            console.error("Read databasesize error : " + query.error);
        }
    }

    /**
     * Deletes oldest products (walking the timestamp index) until at least
     * numOfBytes have been freed, recording per-delete timings in stats.
     */
    deleteProducts(numOfBytes:number, stats:Stats, callback:() => void) {
        var transaction = this.db.transaction('products', 'readwrite');
        var productStore = transaction.objectStore('products');
        var index = productStore.index('by_timestamp');
        var request = index.openCursor();
        request.onsuccess = function () {
            var cursor = request.result;
            if (cursor) {
                var cursorBytes = cursor.value.sizeInBytes;
                var startDeleteTs = Date.now();
                var deleteRequest = cursor.delete();
                deleteRequest.onsuccess = () => {
                    var stopDeleteTs = Date.now();
                    stats.addDeleteTimes(stopDeleteTs - startDeleteTs);
                    numOfBytes -= cursorBytes;
                    // Keep walking only while there are still bytes to free.
                    if (numOfBytes > 0) {
                        cursor.continue();
                    }
                }
                deleteRequest.onerror = () => {
                    console.error("Delete product error : " + deleteRequest.error);
                }
            }
        }
        transaction.oncomplete = () => {
            callback();
        }
        transaction.onabort = () => {
            console.log("Delete transaction aborted with error : " + transaction.error);
        }
    }

    /** Upserts a single product; callback fires once the transaction commits. */
    addProduct(product:Product, callback:() => void) {
        var transaction = this.db.transaction('products', 'readwrite');
        var productStore = transaction.objectStore('products');
        var request = productStore.put(product);
        request.onerror = () => {
            console.log("Unsuccessful request with error : " + request.error);
        }
        transaction.oncomplete = () => {
            callback();
        }
        transaction.onerror = () => {
            console.error("Add product transaction error : " + transaction.error);
        }
        transaction.onabort = () => {
            console.error("Add product transaction aborted with error : " + transaction.error);
        }
    }
}
Upvotes: 5
Views: 888
Reputation: 71
Thanks dumbmatter. Unfortunately, my experience also shows that Chrome never deletes the unnecessary files. The problem is that Chrome's IndexedDB implementation is backed by LevelDB, which rarely runs compaction. However, I found a workaround using PouchDB, which wraps the IndexedDB API: it lets me explicitly trigger compaction and reclaim the space from the unnecessary files.
Upvotes: 1
Reputation: 9683
In Chrome, there is a delay between deleting data through the IndexedDB API and having it deleted from disk. Usually that's fine. But in my experience, sometimes it never gets deleted from disk, which is really bad when the user has exceeded their quota because then you can never store any more data even if you delete everything.
Upvotes: 3