mirror of https://github.com/lukechilds/polar.git
Browse Source
feat: add import/export network functionality — modularize import/export network and zip/unzip functionality, and add some tests. (branch: master)
committed by
jamaljsr
22 changed files with 549 additions and 310 deletions
@ -0,0 +1 @@ |
|||
bar |
@ -0,0 +1 @@ |
|||
console.log('qux'); |
@ -0,0 +1,3 @@ |
|||
{ |
|||
"foo": 2 |
|||
} |
Binary file not shown.
Binary file not shown.
@ -0,0 +1,106 @@ |
|||
import { promises as fs } from 'fs'; |
|||
import { join } from 'path'; |
|||
import { tmpdir } from 'os'; |
|||
import { unzip, zip } from './zip'; |
|||
|
|||
jest.mock('fs-extra', () => jest.requireActual('fs-extra')); |
|||
|
|||
describe('unzip', () => { |
|||
it('unzips test.zip', async () => { |
|||
const destination = join(tmpdir(), 'zip-test-' + Date.now()); |
|||
await unzip(join(__dirname, 'tests', 'resources', 'test.zip'), destination); |
|||
|
|||
const entries = await fs.readdir(destination, { withFileTypes: true }); |
|||
expect(entries.map(e => e.name)).toContain('foo.json'); |
|||
expect(entries.map(e => e.name)).toContain('bar.txt'); |
|||
expect(entries.map(e => e.name)).toContain('baz'); |
|||
|
|||
const fooFile = entries.find(e => e.name === 'foo.json'); |
|||
const barFile = entries.find(e => e.name === 'bar.txt'); |
|||
const bazDir = entries.find(e => e.name === 'baz'); |
|||
|
|||
expect(fooFile).toBeDefined(); |
|||
expect(barFile).toBeDefined(); |
|||
expect(bazDir).toBeDefined(); |
|||
|
|||
expect(fooFile?.isFile()).toBeTruthy(); |
|||
expect(barFile?.isFile()).toBeTruthy(); |
|||
expect(bazDir?.isDirectory()).toBeTruthy(); |
|||
|
|||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|||
const bazEntries = await fs.readdir(join(destination, bazDir!.name), { |
|||
withFileTypes: true, |
|||
}); |
|||
|
|||
expect(bazEntries).toHaveLength(1); |
|||
expect(bazEntries.map(e => e.name)).toContain('qux.ts'); |
|||
|
|||
const qux = await fs.readFile(join(destination, 'baz', 'qux.ts')); |
|||
expect(qux.toString('utf-8')).toBe("console.log('qux');\n"); |
|||
|
|||
const bar = await fs.readFile(join(destination, 'bar.txt')); |
|||
expect(bar.toString('utf-8')).toBe('bar\n'); |
|||
|
|||
const foo = await fs.readFile(join(destination, 'foo.json')); |
|||
expect(foo.toString('utf-8')).toBe(JSON.stringify({ foo: 2 }, null, 4) + '\n'); |
|||
}); |
|||
}); |
|||
|
|||
describe.only('zip', () => { |
|||
it('zips objects', async () => { |
|||
const objects: Array<{ name: string; object: any }> = [ |
|||
{ |
|||
name: 'firstObject', |
|||
object: 2, |
|||
}, |
|||
{ |
|||
name: 'secondObject', |
|||
object: { baz: 'baz' }, |
|||
}, |
|||
{ |
|||
name: 'thirdObject', |
|||
object: [2, { foo: 'foo' }, false], |
|||
}, |
|||
]; |
|||
|
|||
const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`); |
|||
await zip({ |
|||
destination: zipped, |
|||
objects, |
|||
paths: [], |
|||
}); |
|||
|
|||
const unzipped = join(tmpdir(), `zip-test-${Date.now()}`); |
|||
await unzip(zipped, unzipped); |
|||
|
|||
for (const obj of objects) { |
|||
const read = await fs |
|||
.readFile(join(unzipped, obj.name)) |
|||
.then(read => JSON.parse(read.toString('utf-8'))); |
|||
expect(read).toEqual(obj.object); |
|||
} |
|||
}); |
|||
|
|||
it('zips paths', async () => { |
|||
const files = [ |
|||
join(__dirname, 'tests', 'resources', 'bar.txt'), |
|||
join(__dirname, 'tests', 'resources', 'foo.json'), |
|||
join(__dirname, 'tests', 'resources', 'baz'), |
|||
]; |
|||
const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`); |
|||
await zip({ destination: zipped, objects: [], paths: files }); |
|||
|
|||
const unzipped = join(tmpdir(), `zip-test-${Date.now()}`); |
|||
await unzip(zipped, unzipped); |
|||
|
|||
const entries = await fs.readdir(unzipped, { withFileTypes: true }); |
|||
|
|||
const bar = entries.find(e => e.name === 'bar.txt'); |
|||
const baz = entries.find(e => e.name === 'baz'); |
|||
const foo = entries.find(e => e.name === 'foo.json'); |
|||
|
|||
expect(bar?.isFile()).toBeTruthy(); |
|||
expect(baz?.isDirectory()).toBeTruthy(); |
|||
expect(foo?.isFile()).toBeTruthy(); |
|||
}); |
|||
}); |
@ -0,0 +1,157 @@ |
|||
import { error, info, warn } from 'electron-log'; |
|||
import fs from 'fs'; |
|||
import { pathExists } from 'fs-extra'; |
|||
import { basename, join, resolve } from 'path'; |
|||
import archiver from 'archiver'; |
|||
import unzipper from 'unzipper'; |
|||
|
|||
/** |
|||
* Unzips `zip` into `destination` |
|||
*/ |
|||
export const unzip = (zip: string, destination: string): Promise<void> => { |
|||
return new Promise(async (resolve, reject) => { |
|||
try { |
|||
const exists = await pathExists(zip); |
|||
if (!exists) { |
|||
throw Error(`${zip} does not exist!`); |
|||
} |
|||
const stream = fs |
|||
.createReadStream(zip) |
|||
.pipe(unzipper.Extract({ path: destination })); |
|||
|
|||
stream.on('close', resolve); |
|||
stream.on('error', err => { |
|||
error(`Could not unzip ${zip} into ${destination}:`, err); |
|||
reject(err); |
|||
}); |
|||
} catch (err) { |
|||
reject(err); |
|||
} |
|||
}); |
|||
}; |
|||
|
|||
/** Arguments accepted by {@link zip} when building an archive */
interface ZipArgs {
  /** The destination of the generated zip */
  destination: string;
  /** In-memory values to serialize into the zip as JSON entries */
  objects: Array<{
    /** Object to serialize (with `JSON.stringify`) and store in the zip */
    object: any;
    /** Name of this object in the generated zip */
    name: string;
  }>;
  /** Files or folders to include */
  paths: string[];
}
|||
|
|||
/** |
|||
* Adds a raw string into the ZIP archive |
|||
* |
|||
* @param archive ZIP archive to add the file to |
|||
* @param content content to add into archive |
|||
* @param nameInArchive name of file in archive |
|||
*/ |
|||
const addStringToZip = ( |
|||
archive: archiver.Archiver, |
|||
content: string, |
|||
nameInArchive: string, |
|||
): void => { |
|||
try { |
|||
archive.append(content, { name: nameInArchive }); |
|||
} catch (err) { |
|||
error(`Could not add ${nameInArchive} to zip: ${err}`); |
|||
throw err; |
|||
} |
|||
}; |
|||
|
|||
/** |
|||
* Adds a file to the given ZIP archive. We read the file into |
|||
* memory and then append it to the ZIP archive. There appears |
|||
* to be issues with using Archiver.js with Electron/Webpack, |
|||
* so that's why we have to do it in a somewhat inefficient way. |
|||
* |
|||
* Related issues: |
|||
* * https://github.com/archiverjs/node-archiver/issues/349
|
|||
* * https://github.com/archiverjs/node-archiver/issues/403
|
|||
* * https://github.com/archiverjs/node-archiver/issues/174
|
|||
* |
|||
* @param archive ZIP archive to add the file to |
|||
* @param filePath file to add, absolute path |
|||
* @param nameInArchive name of file in archive |
|||
*/ |
|||
const addFileToZip = async ( |
|||
archive: archiver.Archiver, |
|||
filePath: string, |
|||
nameInArchive: string, |
|||
) => { |
|||
return archive.append(await fs.promises.readFile(filePath), { name: nameInArchive }); |
|||
}; |
|||
|
|||
// Generate a sequence of all regular files inside the given directory
|
|||
async function* getFiles(dir: string): AsyncGenerator<string> { |
|||
const entries = await fs.promises.readdir(dir, { withFileTypes: true }); |
|||
for (const entry of entries) { |
|||
const res = resolve(dir, entry.name); |
|||
if (entry.isDirectory()) { |
|||
yield* getFiles(res); |
|||
} else if (entry.isFile()) { |
|||
yield res; |
|||
} |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Add the given path to the archive. If it's a file we add it directly, it it is a directory |
|||
* we recurse over all the files within that directory |
|||
* |
|||
* @param archive ZIP archive to add the file to |
|||
* @param filePath file to add, absolute path |
|||
*/ |
|||
const addFileOrDirectoryToZip = async (archive: archiver.Archiver, filePath: string) => { |
|||
const isDir = await fs.promises.lstat(filePath).then(res => res.isDirectory()); |
|||
if (isDir) { |
|||
info('Adding directory to zip file:', filePath); |
|||
for await (const file of getFiles(filePath)) { |
|||
// a typical file might look like this:
|
|||
// /home/user/.polar/networks/1/volumes/bitcoind/backend1/regtest/mempool.dat
|
|||
// after applying this transformation, we end up with:
|
|||
// volumes/bitcoind/backend1/regtest/mempool.dat
|
|||
const nameInArchive = join(basename(filePath), file.slice(filePath.length)); |
|||
await addFileToZip(archive, file, nameInArchive); |
|||
} |
|||
} else { |
|||
return addFileToZip(archive, filePath, basename(filePath)); |
|||
} |
|||
}; |
|||
|
|||
export const zip = ({ destination, objects, paths }: ZipArgs): Promise<void> => |
|||
new Promise(async (resolve, reject) => { |
|||
const output = fs.createWriteStream(destination); |
|||
const archive = archiver('zip'); |
|||
|
|||
// finished
|
|||
archive.on('finish', () => resolve()); |
|||
archive.on('error', err => { |
|||
error(`got error when zipping ${destination}:`, err); |
|||
reject(err); |
|||
}); |
|||
|
|||
archive.on('warning', warning => { |
|||
warn(`got warning when zipping ${destination}:`, warning); |
|||
reject(warning); |
|||
}); |
|||
|
|||
// pipe all zipped data to the output
|
|||
archive.pipe(output); |
|||
|
|||
const pathPromises = paths.map(p => addFileOrDirectoryToZip(archive, p)); |
|||
|
|||
for (const obj of objects) { |
|||
addStringToZip(archive, JSON.stringify(obj.object), obj.name); |
|||
} |
|||
|
|||
await Promise.all(pathPromises); |
|||
|
|||
// we've added all files, tell this to the archive so it can emit the 'close' event
|
|||
// once all streams have finished
|
|||
archive.finalize(); |
|||
}); |
Loading…
Reference in new issue