Closes #1235

This commit introduces a pretty big change in how app data files are managed. Instead of having multiple files (`accounts.json`, `settings.json`, `user.json`, `countervalues.json`), everything is now stored in a single `app.json`, and writes happen in an *async* way, which should prevent some frame loss. The migration is seamless and keeps the `accounts.json` encryption if it was set, so no user action is required. This change also comes with some simplification of password handling (e.g. there is no more need to store a password hash).

/!\ Disclaimer: during development I ran into a weird issue (3 or 4 times) where my data was simply "erased" in the file, so back to onboarding, no more settings, etc. I suspect a race/write condition, something with write-file-atomic, but I never managed to reproduce it when I tried. Anyway, I am not 100% confident with it, so if you can test on your side with your data (with or without an encryption key), it will help a lot!
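For reviewers, here is a minimal usage sketch of the new single-file API, based on the helpers exercised by the tests in this diff (`init`, `setKey`, `getKey`, `setEncryptionKey`); the directory path and values are illustrative assumptions, not taken from the app:

```js
import db from 'helpers/db'

async function example() {
  // point the db at the user data folder; everything now lives in <folder>/app.json
  db.init('/path/to/userData') // illustrative path

  // async reads/writes on dot-separated key paths inside the "app" namespace
  await db.setKey('app', 'settings.hasPassword', false)
  const hasPassword = await db.getKey('app', 'settings.hasPassword', false)

  // mark a key path as encrypted: it is decrypted in memory now,
  // and encrypted back to disk on every subsequent save
  await db.setEncryptionKey('app', 'accounts', 'passw0rd')

  return hasPassword
}
```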
37 changed files with 861 additions and 322 deletions
@@ -1,13 +0,0 @@
// @flow

const noop = () => {}

module.exports = {
  init: noop,
  get: noop,
  set: noop,
  getIn: noop,
  setIn: noop,
  cleanCache: noop,
  resetAll: noop,
}
@ -1,104 +0,0 @@ |
|||||
// @flow
|
|
||||
|
|
||||
import logger from 'logger' |
|
||||
import Store from 'electron-store' |
|
||||
import set from 'lodash/set' |
|
||||
import get from 'lodash/get' |
|
||||
|
|
||||
import { decodeAccountsModel, encodeAccountsModel } from 'reducers/accounts' |
|
||||
|
|
||||
type DBKey = 'settings' | 'accounts' | 'countervalues' | 'user' | 'migrations' |
|
||||
|
|
||||
const encryptionKey = {} |
|
||||
|
|
||||
const store = key => |
|
||||
new Store({ |
|
||||
name: key, |
|
||||
defaults: { |
|
||||
data: null, |
|
||||
}, |
|
||||
encryptionKey: encryptionKey[key], |
|
||||
}) |
|
||||
|
|
||||
export function setEncryptionKey(key: DBKey, value?: string) { |
|
||||
encryptionKey[key] = value |
|
||||
} |
|
||||
|
|
||||
const transforms = { |
|
||||
get: { |
|
||||
accounts: decodeAccountsModel, |
|
||||
}, |
|
||||
set: { |
|
||||
accounts: encodeAccountsModel, |
|
||||
}, |
|
||||
} |
|
||||
|
|
||||
function middleware(type: 'get' | 'set', key: string, data: any) { |
|
||||
const t = transforms[type][key] |
|
||||
if (t) { |
|
||||
data = t(data) |
|
||||
} |
|
||||
return data |
|
||||
} |
|
||||
|
|
||||
export default { |
|
||||
// If the db doesn't exists for that key, init it, with the default value provided
|
|
||||
init: (key: DBKey, defaults: any) => { |
|
||||
const db = store(key) |
|
||||
const data = db.get('data') |
|
||||
if (!data) { |
|
||||
db.set('data', defaults) |
|
||||
} |
|
||||
}, |
|
||||
|
|
||||
// TODO flowtype this. we should be able to express all the possible entries and their expected type (with a union type)
|
|
||||
get: (key: DBKey, defaults: any): any => { |
|
||||
const db = store(key) |
|
||||
const data = db.get('data', defaults) |
|
||||
logger.onDB('read', key) |
|
||||
return middleware('get', key, data) |
|
||||
}, |
|
||||
|
|
||||
set: (key: DBKey, val: any) => { |
|
||||
const db = store(key) |
|
||||
val = middleware('set', key, val) |
|
||||
logger.onDB('write', key) |
|
||||
db.set('data', val) |
|
||||
return val |
|
||||
}, |
|
||||
|
|
||||
getIn: (key: DBKey, path: string, defaultValue: any) => { |
|
||||
const db = store(key) |
|
||||
let data = db.get('data') |
|
||||
data = middleware('get', key, data) |
|
||||
return get(data, path, defaultValue) |
|
||||
}, |
|
||||
|
|
||||
setIn: (key: DBKey, path: string, val: any) => { |
|
||||
const db = store(key) |
|
||||
const data = db.get('data') |
|
||||
val = middleware('set', key, val) |
|
||||
set(data, path, val) |
|
||||
db.set('data', data) |
|
||||
return val |
|
||||
}, |
|
||||
|
|
||||
cleanCache: () => { |
|
||||
// Only remove cache store
|
|
||||
const keys = ['countervalues'] |
|
||||
keys.forEach(k => { |
|
||||
const db = store(k) |
|
||||
logger.onDB('clear', k) |
|
||||
db.clear() |
|
||||
}) |
|
||||
}, |
|
||||
|
|
||||
resetAll: () => { |
|
||||
const keys = ['settings', 'accounts', 'countervalues'] |
|
||||
keys.forEach(k => { |
|
||||
const db = store(k) |
|
||||
logger.onDB('clear', k) |
|
||||
db.clear() |
|
||||
}) |
|
||||
}, |
|
||||
} |
|
@@ -0,0 +1,226 @@
import os from 'os'
import path from 'path'
import fs from 'fs'
import rimrafModule from 'rimraf'

import db from 'helpers/db'
import { promisify } from 'helpers/promise'

const rimraf = promisify(rimrafModule)
const fsReadFile = promisify(fs.readFile)
const fsWriteFile = promisify(fs.writeFile)
const fsMkdir = promisify(fs.mkdir)

const accountsTransform = {
  get: accounts => accounts.map(account => ({ ...account, balance: Number(account.balance) })),
  set: accounts => accounts.map(account => ({ ...account, balance: account.balance.toString() })),
}

const fakeAccounts = [{ name: 'a', balance: 100 }, { name: 'b', balance: 200 }]

async function createRandomTmpDir() {
  const p = path.resolve(os.tmpdir(), `tmp-${Math.random()}`)
  await rimraf(p)
  await fsMkdir(p)
  return p
}

describe('db - without init', () => {
  test('throw if trying to get key while db not initiated', async () => {
    let err
    try {
      await db.getKey('app', 'accounts')
    } catch (e) {
      err = e
    }
    expect(err).toBeDefined()
    expect(err.name).toBe('NoDBPathGiven')
  })

  test('handle the case where the db file does not exist', async () => {
    let err
    try {
      const dbPath = await createRandomTmpDir()
      db.init(dbPath)
      const dbContent = await db.load('app')
      expect(dbContent).toEqual({})
      await rimraf(dbPath)
    } catch (e) {
      err = e
    }
    expect(err).toBeUndefined()
  })
})

describe('db', () => {
  const dbPath = path.resolve(os.tmpdir(), 'ledger-live-test-db')

  beforeEach(async () => {
    await rimraf(dbPath)
    await fsMkdir(dbPath)
    db.init(dbPath)
  })

  test('set and get key', async () => {
    const a = await db.getKey('app', 'something')
    expect(a).toBeUndefined()
    await db.setKey('app', 'something', 'foo')
    const b = await db.getKey('app', 'something')
    expect(b).toBe('foo')
  })

  test('set and get key, even if nested', async () => {
    await db.setKey('app', 'something.is.good', 'foo')
    const a = await db.getKey('app', 'something.is.good')
    expect(a).toBe('foo')
  })

  test('get the whole namespace', async () => {
    await db.setKey('app', 'something.is.good', 'foo')
    const a = await db.getNamespace('app')
    expect(a).toEqual({ something: { is: { good: 'foo' } } })
  })

  test('set the whole namespace', async () => {
    await db.setNamespace('app', { foo: 'bar' })
    const a = await db.getNamespace('app')
    expect(a).toEqual({ foo: 'bar' })
  })

  test('handle default value if value not set', async () => {
    const a = await db.getKey('app', 'something.is.good', 57)
    expect(a).toBe(57)
  })

  test('encrypt data to filesystem', async () => {
    const data = { this: 'is', sparta: true }
    let content
    let parsed

    // let's try without encrypting
    await db.setKey('app', 'shouldBeEncrypted', data)
    const filePath = path.resolve(dbPath, 'app.json')
    content = await fsReadFile(filePath, 'utf-8')
    parsed = JSON.parse(content).data
    expect(parsed.shouldBeEncrypted).toEqual(data)

    // mark the field as encrypted
    await db.setEncryptionKey('app', 'shouldBeEncrypted', 'passw0rd')

    // let's see if it worked
    content = await fsReadFile(filePath, 'utf-8')
    parsed = JSON.parse(content).data
    const expected = '+UexwDUPgM8mYaandbTUzTMdmZDe+/yd77zOLCHcIWk='
    expect(parsed.shouldBeEncrypted).toEqual(expected)
  })

  test('retrieve encrypted data, after db load', async () => {
    const tmpDir = path.resolve(os.tmpdir(), 'with-encrypted-field')
    await rimraf(tmpDir)
    await fsMkdir(tmpDir)
    const encryptedData =
      '{"data":{ "shouldBeEncrypted": "+UexwDUPgM8mYaandbTUzTMdmZDe+/yd77zOLCHcIWk=" }}'
    await fsWriteFile(path.resolve(tmpDir, 'app.json'), encryptedData)
    db.init(tmpDir)
    const encrypted = await db.getKey('app', 'shouldBeEncrypted')
    expect(encrypted).toBe('+UexwDUPgM8mYaandbTUzTMdmZDe+/yd77zOLCHcIWk=')
    await db.setEncryptionKey('app', 'shouldBeEncrypted', 'passw0rd')
    const decoded = await db.getKey('app', 'shouldBeEncrypted')
    expect(decoded).toEqual({ this: 'is', sparta: true })
    await rimraf(tmpDir)
  })

  test('handle wrong encryption key', async () => {
    await db.setKey('app', 'foo', { some: 'data' })
    await db.setEncryptionKey('app', 'foo', 'passw0rd')

    db.init(dbPath)

    const d = await db.getKey('app', 'foo.some')
    expect(d).toBe(undefined)
    let err
    try {
      await db.setEncryptionKey('app', 'foo', 'totally not the passw0rd')
    } catch (e) {
      err = e
    }
    expect(err).toBeDefined()
    expect(err.name).toBe('DBWrongPassword')
    await db.setEncryptionKey('app', 'foo', 'passw0rd')
    const e = await db.getKey('app', 'foo.some')
    expect(e).toBe('data')
  })

  test('detect if field is encrypted or not', async () => {
    let isEncrypted
    await db.setKey('app', 'encryptedField', { some: 'data' })
    await db.setEncryptionKey('app', 'encryptedField', 'passw0rd')
    db.init(dbPath)
    const k = await db.getKey('app', 'encryptedField')
    expect(k).toBe('HNEETQf+9An6saxmA/X8zg==')
    isEncrypted = await db.isKeyEncrypted('app', 'encryptedField')
    expect(isEncrypted).toBe(true)
    await db.setEncryptionKey('app', 'encryptedField', 'passw0rd')
    isEncrypted = await db.isKeyEncrypted('app', 'encryptedField')
    expect(isEncrypted).toBe(false)
    const value = await db.getKey('app', 'encryptedField')
    expect(value).toEqual({ some: 'data' })
  })

  test('handle transformations', async () => {
    db.registerTransform('app', 'accounts', accountsTransform)
    await db.setKey('app', 'accounts', fakeAccounts)
    const filePath = path.resolve(dbPath, 'app.json')
    const fileContent = await fsReadFile(filePath, 'utf-8')

    // expect transform to have written strings
    const expectedFile =
      '{"data":{"accounts":[{"name":"a","balance":"100"},{"name":"b","balance":"200"}]}}'
    expect(fileContent).toBe(expectedFile)

    db.init(dbPath)
    db.registerTransform('app', 'accounts', accountsTransform)

    // expect transform to have loaded numbers
    const accounts = await db.getKey('app', 'accounts')
    expect(accounts).toEqual(fakeAccounts)
  })

  test('can handle transform on an encrypted field', async () => {
    let accounts
    db.registerTransform('app', 'accounts', accountsTransform)
    await db.setEncryptionKey('app', 'accounts', 'passw0rd')
    await db.setKey('app', 'accounts', fakeAccounts)
    accounts = await db.getKey('app', 'accounts')
    expect(accounts).toEqual(fakeAccounts)
    db.init(dbPath)
    db.registerTransform('app', 'accounts', accountsTransform)
    await db.setEncryptionKey('app', 'accounts', 'passw0rd')
    accounts = await db.getKey('app', 'accounts')
    expect(accounts).toEqual(fakeAccounts)
  })

  test('check if password is correct', async () => {
    let isEncryptionKeyCorrect
    await db.setEncryptionKey('app', 'verySecureField', 'h0dl')
    await db.setKey('app', 'verySecureField', { much: { secure: { data: true } } })
    const filePath = path.resolve(dbPath, 'app.json')
    const content = await fsReadFile(filePath, 'utf-8')
    const expected =
      '{"data":{"verySecureField":"i9SyvjaWm/UVpmuyeChmKjSuiWJuMxEJhhvUhvleRoe6gpAOgBWqREB+CRO6yxkD"}}'
    expect(content).toBe(expected)
    isEncryptionKeyCorrect = db.isEncryptionKeyCorrect('app', 'verySecureField', 'h0dl')
    expect(isEncryptionKeyCorrect).toBe(true)
    isEncryptionKeyCorrect = db.isEncryptionKeyCorrect('app', 'verySecureField', 'never-h0dl')
    expect(isEncryptionKeyCorrect).toBe(false)
  })

  test('inform if a field has an encryption key', async () => {
    let hasEncryptionKey
    await db.setEncryptionKey('app', 'verySecureField', 'h0dl')
    hasEncryptionKey = db.hasEncryptionKey('app', 'verySecureField')
    expect(hasEncryptionKey).toBe(true)
    hasEncryptionKey = db.hasEncryptionKey('app', 'veryInexistantField')
    expect(hasEncryptionKey).toBe(false)
  })
})
@@ -1,3 +1,291 @@
-const db = process.env.STORYBOOK_ENV ? require('./db-storybook') : require('./db')
-
-module.exports = db
// @flow

import fs from 'fs'
import path from 'path'
import crypto from 'crypto'
import cloneDeep from 'lodash/cloneDeep'
import writeFileAtomicModule from 'write-file-atomic'
import get from 'lodash/get'
import set from 'lodash/set'

import logger from 'logger'
import { promisify } from 'helpers/promise'

import { NoDBPathGiven, DBWrongPassword } from 'config/errors'

type Transform = {
  get: any => any,
  set: any => any,
}

const fsReadFile = promisify(fs.readFile)
const fsUnlink = promisify(fs.unlink)
const writeFileAtomic = promisify(writeFileAtomicModule)

const ALGORITHM = 'aes-256-cbc'

let queue = Promise.resolve()

let DBPath = null
let memoryNamespaces = {}
let encryptionKeys = {}
let transforms = {}

/**
 * Reset in-memory state, db path, encryption keys, transforms...
 */
function init(_DBPath: string) {
  DBPath = _DBPath
  memoryNamespaces = {}
  encryptionKeys = {}
  transforms = {}
}

/**
 * Register a transformation for a given namespace and keyPath.
 * It will be used when reading from / writing to the file.
 */
function registerTransform(ns: string, keyPath: string, transform: Transform) {
  if (!transforms[ns]) transforms[ns] = {}
  transforms[ns][keyPath] = transform
}

/**
 * Load a namespace, using <file>.json
 */
async function load(ns: string): Promise<mixed> {
  try {
    if (!DBPath) throw new NoDBPathGiven()
    const filePath = path.resolve(DBPath, `${ns}.json`)
    const fileContent = await fsReadFile(filePath)
    const { data } = JSON.parse(fileContent)
    memoryNamespaces[ns] = data

    // transform fields
    for (const keyPath in transforms[ns]) {
      if (transforms[ns].hasOwnProperty(keyPath)) {
        const transform = transforms[ns][keyPath]
        const val = get(memoryNamespaces[ns], keyPath)

        // if the value is a string, it's encrypted, so we don't transform it
        if (typeof val === 'string') continue // eslint-disable-line no-continue

        set(memoryNamespaces[ns], keyPath, transform.get(val))
      }
    }
  } catch (err) {
    if (err.code === 'ENOENT') {
      memoryNamespaces[ns] = {}
      await save(ns)
    } else {
      logger.error(err)
      throw err
    }
  }
  return memoryNamespaces[ns]
}

async function ensureNSLoaded(ns: string) {
  if (!memoryNamespaces[ns]) {
    await load(ns)
  }
}

/**
 * Register a keyPath in db that is encrypted
 * This will decrypt the keyPath at this moment, and will be used
 * in `save` to encrypt it back
 */
async function setEncryptionKey(ns: string, keyPath: string, encryptionKey: string): Promise<any> {
  if (!encryptionKeys[ns]) encryptionKeys[ns] = {}
  encryptionKeys[ns][keyPath] = encryptionKey
  const val = await getKey(ns, keyPath, null)

  // no need to decode if already decoded
  if (!val || typeof val !== 'string') {
    return save(ns)
  }

  try {
    const decipher = crypto.createDecipher(ALGORITHM, encryptionKey)
    const raw = decipher.update(val, 'base64', 'utf8') + decipher.final('utf8')
    let decrypted = JSON.parse(raw)

    // handle the case when we just migrated from the previous storage
    // which stored the data in binary with a `data` key
    if (ns === 'app' && keyPath === 'accounts' && decrypted.data) {
      decrypted = decrypted.data
    }

    // apply transform if needed
    const transform = get(transforms, `${ns}.${keyPath}`)
    if (transform) {
      decrypted = transform.get(decrypted)
    }

    // only set decrypted data in memory
    set(memoryNamespaces[ns], keyPath, decrypted)

    return save(ns)
  } catch (err) {
    throw new DBWrongPassword()
  }
}

async function removeEncryptionKey(ns: string, keyPath: string) {
  set(encryptionKeys, `${ns}.${keyPath}`, undefined)
  return save(ns)
}

/**
 * Set a key in the given namespace
 */
async function setKey(ns: string, keyPath: string, value: any): Promise<any> {
  logger.onDB('write', `${ns}:${keyPath}`)
  await ensureNSLoaded(ns)
  set(memoryNamespaces[ns], keyPath, value)
  return save(ns)
}

/**
 * Get a key in the given namespace
 */
async function getKey(ns: string, keyPath: string, defaultValue?: any): Promise<any> {
  logger.onDB('read', `${ns}:${keyPath}`)
  await ensureNSLoaded(ns)
  if (!keyPath) return memoryNamespaces[ns] || defaultValue
  return get(memoryNamespaces[ns], keyPath, defaultValue)
}

/**
 * Get the whole namespace
 */
async function getNamespace(ns: string, defaultValue?: any) {
  logger.onDB('read', ns)
  await ensureNSLoaded(ns)
  return memoryNamespaces[ns] || defaultValue
}

async function setNamespace(ns: string, value: any) {
  logger.onDB('write', ns)
  set(memoryNamespaces, ns, value)
  return save(ns)
}

/**
 * Check if a key is encrypted
 *
 * /!\ a value is considered encrypted if it's a string that can't be
 * JSON.parsed, so this can give false positives if misused
 */
async function isKeyEncrypted(ns: string, keyPath: string): Promise<boolean> {
  const v = await getKey(ns, keyPath)
  if (typeof v !== 'string') return false
  try {
    JSON.parse(v)
    return false
  } catch (err) {
    return true
  }
}

/**
 * Save the given namespace to its corresponding file, in an atomic way
 */
async function saveToDisk(ns: string) {
  if (!DBPath) throw new NoDBPathGiven()
  await ensureNSLoaded(ns)

  // cloning because we are mutating the obj
  const clone = cloneDeep(memoryNamespaces[ns])

  // transform fields
  if (transforms[ns]) {
    for (const keyPath in transforms[ns]) {
      if (transforms[ns].hasOwnProperty(keyPath)) {
        const transform = transforms[ns][keyPath]
        const val = get(clone, keyPath)
        // we don't want to transform encrypted fields (that have not been decrypted yet)
        if (!val || typeof val === 'string') continue // eslint-disable-line no-continue
        set(clone, keyPath, transform.set(val))
      }
    }
  }

  // encrypt fields
  if (encryptionKeys[ns]) {
    for (const keyPath in encryptionKeys[ns]) {
      if (encryptionKeys[ns].hasOwnProperty(keyPath)) {
        const encryptionKey = encryptionKeys[ns][keyPath]
        if (!encryptionKey) continue // eslint-disable-line no-continue
        const val = get(clone, keyPath)
        if (!val) continue // eslint-disable-line no-continue
        const cipher = crypto.createCipher(ALGORITHM, encryptionKey)
        const encrypted =
          cipher.update(JSON.stringify(val), 'utf8', 'base64') + cipher.final('base64')
        set(clone, keyPath, encrypted)
      }
    }
  }

  const fileContent = JSON.stringify({ data: clone })
  await writeFileAtomic(path.resolve(DBPath, `${ns}.json`), fileContent)
}

function save(ns: string) {
  queue = queue.then(() => saveToDisk(ns))
  return queue
}

async function cleanCache() {
  logger.onDB('clean cache')
  await setKey('app', 'countervalues', null)
  await save('app')
}

async function resetAll() {
  logger.onDB('reset all')
  if (!DBPath) throw new NoDBPathGiven()
  memoryNamespaces.app = null
  await fsUnlink(path.resolve(DBPath, 'app.json'))
}

function isEncryptionKeyCorrect(ns: string, keyPath: string, encryptionKey: string) {
  try {
    return encryptionKeys[ns][keyPath] === encryptionKey
  } catch (err) {
    return false
  }
}

function hasEncryptionKey(ns: string, keyPath: string) {
  try {
    return !!encryptionKeys[ns][keyPath]
  } catch (err) {
    return false
  }
}

function getDBPath() {
  if (!DBPath) throw new Error('Trying to get db path but it is not initialized')
  return DBPath
}

export default {
  init,
  load,
  registerTransform,
  setEncryptionKey,
  removeEncryptionKey,
  isEncryptionKeyCorrect,
  hasEncryptionKey,
  setKey,
  getKey,
  getNamespace,
  setNamespace,
  isKeyEncrypted,
  save,
  cleanCache,
  resetAll,
  getDBPath,
}
@@ -1,3 +1,6 @@
-const logger = process.env.STORYBOOK_ENV ? require('./logger-storybook') : require('./logger')
const logger =
  process.env.STORYBOOK_ENV || process.env.NODE_ENV === 'test'
    ? require('./logger-storybook')
    : require('./logger')

module.exports = logger
@@ -0,0 +1,53 @@
// @flow

import fs from 'fs'
import path from 'path'

import { promisify } from 'helpers/promise'
import db from 'helpers/db'

import type { Migration } from './types'

const fsReadfile = promisify(fs.readFile)
const fsUnlink = promisify(fs.unlink)

const migrations: Migration[] = [
  {
    doc: 'merging multiple db files into one app file',
    run: async () => {
      const dbPath = db.getDBPath()
      const legacyKeys = ['accounts', 'countervalues', 'settings', 'user']
      const [accounts, countervalues, settings, user] = await Promise.all(
        legacyKeys.map(key => getLegacyData(path.join(dbPath, `${key}.json`))),
      )
      const appData = { user, settings, accounts, countervalues }
      await db.setNamespace('app', appData)
      const hasPassword = await db.getKey('app', 'settings.password.isEnabled', false)
      await db.setKey('app', 'settings.hasPassword', hasPassword)
      await db.setKey('app', 'settings.password', undefined)
      const windowParams = await db.getKey('app', 'settings.window')
      await db.setKey('app', 'settings.window', undefined)
      await db.setNamespace('windowParams', windowParams)
      await Promise.all(legacyKeys.map(key => fsUnlink(path.join(dbPath, `${key}.json`))))
    },
  },
]

async function getLegacyData(filePath) {
  let finalData
  const fileContent = await fsReadfile(filePath, 'utf-8')
  try {
    const { data } = JSON.parse(fileContent)
    finalData = data
  } catch (err) {
    // we assume we are in that case because the file is encrypted
    if (err instanceof SyntaxError) {
      const buf = await fsReadfile(filePath)
      return buf.toString('base64')
    }
    throw err
  }
  return finalData
}

export default migrations
@@ -0,0 +1,96 @@
import fs from 'fs'
import os from 'os'
import path from 'path'
import { spawn } from 'child_process'
import rimrafModule from 'rimraf'
import { BigNumber } from 'bignumber.js'

import { promisify } from 'helpers/promise'
import { runMigrations } from 'migrations'
import { decodeAccountsModel, encodeAccountsModel } from 'reducers/accounts'
import db from 'helpers/db'

const rimraf = promisify(rimrafModule)
const fsReaddir = promisify(fs.readdir)

const tmpDir = os.tmpdir()

const accountsTransform = {
  get: decodeAccountsModel,
  set: encodeAccountsModel,
}

describe('migration 1', () => {
  describe('without encryption', () => {
    test('merging db files', async () => {
      const dir = await extractMock('userdata_v1.0.5_mock-01')
      let files
      db.init(dir)
      files = await fsReaddir(dir)
      expect(files).toEqual([
        'accounts.json',
        'countervalues.json',
        'migrations.json',
        'settings.json',
        'user.json',
      ])
      await runMigrations()
      files = await fsReaddir(dir)
      expect(files).toEqual(['app.json', 'migrations.json', 'windowParams.json'])
      db.init(dir)
      db.registerTransform('app', 'accounts', accountsTransform)
      const accounts = await db.getKey('app', 'accounts')
      expect(accounts.length).toBe(3)
      expect(accounts[0].balance).toBeInstanceOf(BigNumber)
      const windowParams = await db.getNamespace('windowParams')
      expect(windowParams).toEqual({
        MainWindow: {
          positions: { x: 37, y: 37 },
          dimensions: { width: 1526, height: 826 },
        },
      })
    })
  })

  describe('with encryption', () => {
    test('merging db files', async () => {
      const dir = await extractMock('userdata_v1.0.5_mock-02-encrypted-accounts')
      db.init(dir)
      db.registerTransform('app', 'accounts', accountsTransform)
      await runMigrations()
      await db.setEncryptionKey('app', 'accounts', 'passw0rd')
      const files = await fsReaddir(dir)
      expect(files).toEqual(['app.json', 'migrations.json', 'windowParams.json'])
      const accounts = await db.getKey('app', 'accounts')
      expect(accounts.length).toBe(6)
      expect(accounts[0].balance).toBeInstanceOf(BigNumber)
    })

    test('migrate password setting', async () => {
      const dir = await extractMock('userdata_v1.0.5_mock-02-encrypted-accounts')
      db.init(dir)
      db.registerTransform('app', 'accounts', accountsTransform)
      await runMigrations()
      const legacyPasswordSettings = await db.getKey('app', 'settings.password')
      expect(legacyPasswordSettings).toBeUndefined()
      const hasPassword = await db.getKey('app', 'settings.hasPassword')
      expect(hasPassword).toBe(true)
    })
  })
})

async function extractMock(mockName) {
  const destDirectory = path.resolve(tmpDir, mockName)
  const zipFilePath = path.resolve(__dirname, 'mocks', `${mockName}.zip`)
  await rimraf(destDirectory)
  await extractZip(zipFilePath, destDirectory)
  return destDirectory
}

function extractZip(zipFilePath, destDirectory) {
  return new Promise((resolve, reject) => {
    const childProcess = spawn('unzip', [zipFilePath, '-d', destDirectory])
    childProcess.on('close', resolve)
    childProcess.on('error', reject)
  })
}
Binary file not shown.
Binary file not shown.