Continue addressing code review and feedback

master
Torkel Rogstad authored 5 years ago, committed by jamaljsr
commit 82bc894694
  1. .eslintignore (3)
  2. .vscode/settings.json (13)
  3. src/__mocks__/archiver.js (22)
  4. src/__mocks__/fs-extra.js (5)
  5. src/components/network/ImportNetwork.spec.tsx (4)
  6. src/components/network/ImportNetwork.tsx (33)
  7. src/components/network/NetworkView.spec.tsx (2)
  8. src/components/routing/Routes.spec.tsx (4)
  9. src/i18n/locales/en-US.json (6)
  10. src/store/models/network.spec.ts (14)
  11. src/store/models/network.ts (14)
  12. src/utils/constants.ts (1)
  13. src/utils/network.ts (108)
  14. src/utils/tests/resources/bar.txt (1)
  15. src/utils/tests/resources/baz/qux.ts (1)
  16. src/utils/tests/resources/foo.json (3)
  17. src/utils/tests/resources/test.zip (BIN)
  18. src/utils/tests/resources/zipped-network.zip (BIN)
  19. src/utils/zip.spec.ts (174)
  20. src/utils/zip.ts (71)
  21. tsconfig.json (3)

.eslintignore (3)

@@ -7,6 +7,3 @@
# compiled by tsc from /src/electron/
/public
# rest resources
/src/utils/resources

.vscode/settings.json (13)

@@ -67,5 +67,16 @@
],
"editor.codeActionsOnSave": {
"source.fixAll.eslint": true
}
},
"cSpell.words": [
"Testcafe",
"antd",
"bitcoind",
"clightning",
"cmps",
"logobw",
"mrblenny",
"unzipper",
"uploader"
]
}

src/__mocks__/archiver.js (22)

@@ -0,0 +1,22 @@
const createMockArchiver = () => {
// TODO: this code should live in __mocks__/archiver.js
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { PassThrough } = require('stream');
let mockStream;
// return a fake stream when "archiver()" is called in the app
const ctor = function() {
mockStream = new PassThrough();
mockStream.file = jest.fn();
mockStream.directory = jest.fn();
mockStream.append = jest.fn();
mockStream.finalize = jest.fn();
return mockStream;
};
// attach a func to emit events on the stream from the tests
ctor.mockEmit = (event, data) => mockStream.emit(event, data);
return ctor;
};
export default createMockArchiver();
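
For reference, this is roughly how a spec drives the mock (the pattern mirrors zip.spec.ts further down; the import path and test body here are illustrative, and Jest applies the manual mock automatically because archiver is a node_modules package):

import fsExtra from 'fs-extra';
import archiver from 'archiver';
import { PassThrough } from 'stream';
import { zip } from './zip'; // illustrative path

const fsMock = fsExtra as jest.Mocked<typeof fsExtra>;
const archiverMock = archiver as jest.Mocked<any>;

it('rejects when the archive emits an error', async () => {
  // give zip() a writable stream to pipe into, then fire an event on the fake archiver stream
  fsMock.createWriteStream.mockReturnValueOnce(new PassThrough() as any);
  const promise = zip('source', 'destination');
  archiverMock.mockEmit('error', new Error('boom'));
  await expect(promise).rejects.toThrow('boom');
});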

src/__mocks__/fs-extra.js (5)

@@ -1,8 +1,11 @@
module.exports = {
outputFile: jest.fn(),
writeFile: jest.fn(),
pathExists: jest.fn(),
readFile: jest.fn(),
remove: jest.fn(),
ensureDir: jest.fn(),
copyFile: jest.fn(),
copy: jest.fn(),
createWriteStream: jest.fn(),
createReadStream: jest.fn(),
};
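
A small sketch of how a test can shape these stubs per case (values are made up; the cast is the same one zip.spec.ts uses below):

import fsExtra from 'fs-extra';

const fsMock = fsExtra as jest.Mocked<typeof fsExtra>;
// every export above is a bare jest.fn(), so each test decides what it returns
(fsMock.pathExists as unknown as jest.Mock).mockResolvedValue(true);
(fsMock.readFile as unknown as jest.Mock).mockResolvedValue(Buffer.from('{}'));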

src/components/network/ImportNetwork.spec.tsx (4)

@@ -21,7 +21,9 @@ describe('ImportNetwork component', () => {
it('has a file uploader', async () => {
const { getByText } = renderComponent();
expect(
getByText('Click or drag ZIP file to this area to import'),
getByText(
'Drag a zip file exported from Polar here, or click to browse for the file',
),
).toBeInTheDocument();
});

src/components/network/ImportNetwork.tsx (33)

@@ -1,4 +1,5 @@
import React, { useState } from 'react';
import React from 'react';
import { useAsyncCallback } from 'react-async-hook';
import { RouteComponentProps } from 'react-router';
import { UploadOutlined } from '@ant-design/icons';
import styled from '@emotion/styled';
@@ -35,27 +36,17 @@ const ImportNetwork: React.FC<RouteComponentProps> = () => {
const { navigateTo, notify } = useStoreActions(s => s.app);
const { importNetwork } = useStoreActions(s => s.network);
const { l } = usePrefixedTranslation('cmps.network.ImportNetwork');
const [importing, setImporting] = useState(false);
const doImportNetwork = (file: RcFile) => {
setImporting(true);
// we kick off the import promise, but don't wait for it
importNetwork(file.path)
.then(network => {
const doImportNetwork = useAsyncCallback(async (file: RcFile) => {
try {
const network = await importNetwork(file.path);
notify({ message: l('importSuccess', { name: network.name }) });
navigateTo(HOME);
})
.catch(error => {
} catch (error) {
notify({ message: l('importError', { file: file.name }), error });
})
.then(() => {
setImporting(false);
});
}
// return false to prevent the Upload.Dragger from sending the file somewhere
return false;
};
return;
});
const theme = useTheme();
return (
@@ -70,10 +61,10 @@ const ImportNetwork: React.FC<RouteComponentProps> = () => {
// to not display a file in the upload dragger after the user has selected a zip
fileList={undefined}
accept=".zip"
disabled={importing}
beforeUpload={doImportNetwork}
disabled={doImportNetwork.loading}
beforeUpload={doImportNetwork.execute}
>
{importing ? (
{doImportNetwork.loading ? (
<>
<Spin size="large" />
<p>{l('importText')}</p>
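
For context, a stripped-down sketch of the react-async-hook pattern the component now relies on: useAsyncCallback wraps the async handler and exposes execute to run it and loading to track it, which replaces the manual importing/setImporting state (names below are simplified):

const doImport = useAsyncCallback(async (file: RcFile) => {
  const network = await importNetwork(file.path); // store thunk, as above
  notify({ message: l('importSuccess', { name: network.name }) });
});

// <Upload.Dragger disabled={doImport.loading} beforeUpload={doImport.execute} />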

src/components/network/NetworkView.spec.tsx (2)

@@ -1,7 +1,7 @@
import React from 'react';
import electron from 'electron';
import fsExtra from 'fs-extra';
import { fireEvent, wait, waitForElement } from '@testing-library/dom';
import { fireEvent, getByText, wait, waitForElement } from '@testing-library/dom';
import { act } from '@testing-library/react';
import { createMemoryHistory } from 'history';
import { Status } from 'shared/types';

src/components/routing/Routes.spec.tsx (4)

@@ -1,6 +1,6 @@
import React from 'react';
import { renderWithProviders } from 'utils/tests';
import { HOME, NETWORK_NEW, Routes, NETWORK_IMPORT } from 'components/routing';
import { HOME, NETWORK_IMPORT, NETWORK_NEW, Routes } from 'components/routing';
describe('App container', () => {
const renderComponent = (route: string) => {
@@ -19,6 +19,6 @@ describe('App container', () => {
it('should render the import network page', () => {
const { getByText } = renderComponent(NETWORK_IMPORT);
expect(getByText('Import a pre-defined Lightning Network')).toBeInTheDocument();
expect(getByText('Import a Lightning Network')).toBeInTheDocument();
});
});

src/i18n/locales/en-US.json (6)

@@ -260,8 +260,8 @@
"cmps.network.NetworkView.exportSuccess": "Exported '{{name}}'. Saved the zip file to {{destination}}",
"cmps.network.NetworkView.notReadyToExport": "Cannot export a running network",
"cmps.network.NetworkView.notReadyToExportDescription": "Make sure the network is completely stopped before exporting it.",
"cmps.network.ImportNetwork.title": "Import a pre-defined Lightning Network",
"cmps.network.ImportNetwork.fileDraggerArea": "Click or drag ZIP file to this area to import",
"cmps.network.ImportNetwork.title": "Import a Lightning Network",
"cmps.network.ImportNetwork.fileDraggerArea": "Drag a zip file exported from Polar here, or click to browse for the file",
"cmps.network.ImportNetwork.importText": "Importing...",
"cmps.network.ImportNetwork.importSuccess": "Imported network '{{name}}' successfully",
"cmps.network.ImportNetwork.importError": "Could not import '{{file}}'",
@@ -349,5 +349,5 @@
"store.models.network.removeLastErr": "Cannot remove the only bitcoin node",
"store.models.network.removeCompatErr": "There are no other compatible backends for {{lnName}} to connect to. You must remove the {{lnName}} node first",
"utils.network.backendCompatError": "This network does not contain a Bitcoin Core v{{requiredVersion}} (or lower) node which is required for {{implementation}} v{{version}}",
"utils.network.importClightningWindows": "Importing networks with c-lightning nodes is not supported on Windows"
"utils.network.incompatibleImplementation": "Importing networks with {{implementation}} nodes is not supported on {{platform}}"
}
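
The new incompatibleImplementation key is consumed in src/utils/network.ts below; assuming the usual i18next-style interpolation, the call and one possible rendered message look like this (the concrete values are only examples):

throw Error(l('incompatibleImplementation', { implementation, platform }));
// e.g. "Importing networks with c-lightning nodes is not supported on windows"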

src/store/models/network.spec.ts (14)

@@ -28,7 +28,6 @@ jest.mock('utils/files', () => ({
jest.mock('utils/network', () => ({
...jest.requireActual('utils/network'),
importNetworkFromZip: () => {
return jest.fn().mockImplementation(() => {
const network = {
id: 1,
nodes: {
@@ -37,7 +36,6 @@ jest.mock('utils/network', () => ({
},
};
return [network, {}];
})();
},
}));
@@ -842,11 +840,11 @@ describe('Network model', () => {
describe('Export', () => {
it('should export a network and show a save dialogue', async () => {
const { network: networkActions } = store.getActions();
const { exportNetwork } = store.getActions().network;
const spy = jest.spyOn(electron.remote.dialog, 'showSaveDialog');
const exported = await networkActions.exportNetwork(getNetwork());
const exported = await exportNetwork(getNetwork());
expect(exported).toBeDefined();
expect(spy).toHaveBeenCalled();
@@ -859,18 +857,18 @@
// returns undefined if user closes the window
mock.mockImplementation(() => ({} as any));
const { network: networkActions } = store.getActions();
const exported = await networkActions.exportNetwork(getNetwork());
const { exportNetwork } = store.getActions().network;
const exported = await exportNetwork(getNetwork());
expect(exported).toBeUndefined();
});
});
describe('Import', () => {
it('should import a network', async () => {
const { network: networkActions } = store.getActions();
const { importNetwork } = store.getActions().network;
const statePreImport = store.getState();
const imported = await networkActions.importNetwork('zip');
const imported = await importNetwork('zip');
expect(imported.id).toBeDefined();
expect(imported.nodes.bitcoin.length).toBeGreaterThan(0);
expect(imported.nodes.lightning.length).toBeGreaterThan(0);

src/store/models/network.ts (14)

@@ -1,6 +1,6 @@
import { remote, SaveDialogOptions } from 'electron';
import { info } from 'electron-log';
import { copyFile, ensureDir } from 'fs-extra';
import { copy, ensureDir } from 'fs-extra';
import { join } from 'path';
import { push } from 'connected-react-router';
import { Action, action, Computed, computed, Thunk, thunk } from 'easy-peasy';
@@ -646,7 +646,7 @@ const networkModel: NetworkModel = {
const zipped = await zipNetwork(network, allCharts[network.id]);
await copyFile(zipped, zipDestination);
await copy(zipped, zipDestination);
info('exported network to', zipDestination);
return zipDestination;
}),
@@ -656,14 +656,14 @@
network: { networks },
} = getStoreState();
const { network: networkActions } = getStoreActions();
const { designer: designerActions } = getStoreActions();
const { add, save } = getStoreActions().network;
const { setChart } = getStoreActions().designer;
const [newNetwork, chart] = await importNetworkFromZip(path, networks);
networkActions.add(newNetwork);
designerActions.setChart({ chart, id: newNetwork.id });
await networkActions.save();
add(newNetwork);
setChart({ chart, id: newNetwork.id });
await save();
info('imported', newNetwork);
return newNetwork;

src/utils/constants.ts (1)

@@ -143,6 +143,7 @@ export const dockerConfigs: Record<NodeImplementation, DockerConfig> = {
'bitcoind',
'-server=1',
'-regtest=1',
'-reindex',
'-rpcauth={{rpcUser}}:{{rpcAuth}}',
'-debug=1',
'-zmqpubrawblock=tcp://0.0.0.0:28334',

src/utils/network.ts (108)

@@ -1,6 +1,5 @@
import { debug } from 'electron-log';
import { promises as fs } from 'fs';
import { copy } from 'fs-extra';
import { copy, mkdirp, readFile, writeFile } from 'fs-extra';
import { basename, join } from 'path';
import { IChart } from '@mrblenny/react-flow-chart';
import detectPort from 'detect-port';
@@ -21,12 +20,13 @@
ManagedImage,
Network,
} from 'types';
import NetworkDesigner from 'components/designer/NetworkDesigner';
import { dataPath, networksPath, nodePath } from './config';
import { BasePorts, DOCKER_REPO } from './constants';
import { BasePorts, DOCKER_REPO, dockerConfigs } from './constants';
import { getName } from './names';
import { range } from './numbers';
import { isVersionCompatible } from './strings';
import { isWindows } from './system';
import { getPolarPlatform } from './system';
import { prefixTranslation } from './translate';
import { unzip, zip } from './zip';
@@ -227,12 +227,43 @@ const isNetwork = (value: any): value is Network => {
};
const readNetwork = async (path: string, id: number): Promise<Network> => {
const rawNetwork = await fs.readFile(path);
const rawNetwork = await readFile(path);
const network = JSON.parse(rawNetwork.toString('utf-8'));
if (!isNetwork(network)) {
throw Error(`${path} did not contain a valid network!`);
}
return network;
};
const isChart = (value: any): value is IChart =>
typeof value === 'object' &&
typeof value.offset === 'object' &&
typeof value.nodes === 'object' &&
typeof value.links === 'object' &&
typeof value.selected === 'object' &&
typeof value.hovered === 'object';
const readExportFile = async (path: string, id: number): Promise<[Network, IChart]> => {
const rawFile = await readFile(path);
const parsed = JSON.parse(rawFile.toString('utf-8'));
if (!parsed.network) {
throw Error(`${path} did not contain a 'network' field`);
}
if (!parsed.chart) {
throw Error(`${path} did not contain a 'chart' field`);
}
const network = parsed.network as unknown;
const chart = parsed.chart as unknown;
if (!isNetwork(network)) {
throw Error(`${path} did not contain a valid network`);
}
if (!isChart(chart)) {
throw Error(`${path} did not contain a valid chart`);
}
network.path = join(dataPath, 'networks', id.toString());
network.id = id;
@@ -248,10 +279,7 @@ const readNetwork = async (path: string, id: number): Promise<Network> => {
const clightning = ln as CLightningNode;
clightning.paths = {
macaroon: join(
network.path,
'volumes',
'c-lightning',
clightning.name,
nodePath(network, 'c-lightning', clightning.name),
'rest-api',
'access.macaroon',
),
@@ -259,25 +287,7 @@ const readNetwork = async (path: string, id: number): Promise<Network> => {
}
});
return network;
};
const isChart = (value: any): value is IChart =>
typeof value === 'object' &&
typeof value.offset === 'object' &&
typeof value.nodes === 'object' &&
typeof value.links === 'object' &&
typeof value.selected === 'object' &&
typeof value.hovered === 'object';
const readChart = async (path: string): Promise<IChart> => {
const rawChart = await fs.readFile(path);
const chart = JSON.parse(rawChart.toString('utf-8'));
if (!isChart(chart)) {
throw Error(`${path} did not contain a valid chart`);
}
return chart;
return [network, chart];
};
/**
@@ -291,10 +301,7 @@ export const getNetworkFromZip = async (
const destination = join(os.tmpdir(), basename(zip, '.zip'));
await unzip(zip, destination);
const [network, chart] = await Promise.all([
readNetwork(join(destination, 'network.json'), newId),
readChart(join(destination, 'chart.json')),
]);
const [network, chart] = await readExportFile(join(destination, 'export.json'), newId);
return [network, chart, destination];
};
@@ -320,16 +327,19 @@
zipPath,
newId,
);
const networkHasCLightning = newNetwork.nodes.lightning.some(
n => n.implementation === 'c-lightning',
);
if (isWindows() && networkHasCLightning) {
throw Error(l('importClightningWindows'));
const platform = getPolarPlatform();
for (const { implementation } of newNetwork.nodes.lightning) {
const { platforms } = dockerConfigs[implementation];
const nodeSupportsPlatform = platforms.includes(platform);
if (!nodeSupportsPlatform) {
throw Error(l('incompatibleImplementation', { implementation, platform }));
}
}
const newNetworkDirectory = join(dataPath, 'networks', newId.toString());
await fs.mkdir(newNetworkDirectory, { recursive: true });
await mkdirp(newNetworkDirectory);
const thingsToCopy = ['docker-compose.yml', 'volumes'];
await Promise.all(
@@ -366,22 +376,16 @@ export const zipNameForNetwork = (network: Network): string =>
* @return Path of created `.zip` file
*/
export const zipNetwork = async (network: Network, chart: IChart): Promise<string> => {
const exportFileContent = {
network,
chart,
};
await writeFile(join(network.path, 'export.json'), JSON.stringify(exportFileContent));
const destination = join(tmpdir(), zipNameForNetwork(network));
await zip({
destination,
objects: [
{
name: 'network.json',
object: network,
},
{
name: 'chart.json',
object: chart,
},
],
paths: [join(network.path, 'docker-compose.yml'), join(network.path, 'volumes')],
});
await zip(network.path, destination);
return destination;
};
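
Taken together, these hunks replace the old network.json + chart.json pair with a single export.json at the root of the network folder. A sketch of the round trip using the helpers above (temp-dir and variable names are illustrative):

// export: zipNetwork writes the combined file, then zips the whole network directory
await writeFile(join(network.path, 'export.json'), JSON.stringify({ network, chart }));
await zip(network.path, join(tmpdir(), zipNameForNetwork(network)));

// import: getNetworkFromZip unzips to a temp dir, then validates both halves
await unzip(zipPath, destination);
const [importedNetwork, importedChart] = await readExportFile(join(destination, 'export.json'), newId);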

src/utils/tests/resources/bar.txt (1)

@@ -1 +0,0 @@
bar

src/utils/tests/resources/baz/qux.ts (1)

@@ -1 +0,0 @@
console.log('qux');

src/utils/tests/resources/foo.json (3)

@@ -1,3 +0,0 @@
{
"foo": 2
}

src/utils/tests/resources/test.zip (BIN)

Binary file not shown.

src/utils/tests/resources/zipped-network.zip (BIN)

Binary file not shown.

src/utils/zip.spec.ts (174)

@@ -1,10 +1,11 @@
import { promises as fs } from 'fs';
import fsExtra from 'fs-extra';
import { join } from 'path';
import archiver from 'archiver';
import { tmpdir } from 'os';
import { PassThrough } from 'stream';
import { unzip, zip } from './zip';
jest.mock('fs-extra', () => jest.requireActual('fs-extra'));
const fsMock = fsExtra as jest.Mocked<typeof fsExtra>;
const archiverMock = archiver as jest.Mocked<any>;
describe('unzip', () => {
it("fail to unzip something that isn't a zip", async () => {
@@ -13,105 +14,94 @@
).rejects.toThrow();
});
it('unzips test.zip', async () => {
const destination = join(tmpdir(), 'zip-test-' + Date.now());
await unzip(join(__dirname, 'tests', 'resources', 'test.zip'), destination);
const entries = await fs.readdir(destination, { withFileTypes: true });
expect(entries.map(e => e.name)).toContain('foo.json');
expect(entries.map(e => e.name)).toContain('bar.txt');
expect(entries.map(e => e.name)).toContain('baz');
const fooFile = entries.find(e => e.name === 'foo.json');
const barFile = entries.find(e => e.name === 'bar.txt');
const bazDir = entries.find(e => e.name === 'baz');
expect(fooFile).toBeDefined();
expect(barFile).toBeDefined();
expect(bazDir).toBeDefined();
expect(fooFile?.isFile()).toBeTruthy();
expect(barFile?.isFile()).toBeTruthy();
expect(bazDir?.isDirectory()).toBeTruthy();
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const bazEntries = await fs.readdir(join(destination, bazDir!.name), {
withFileTypes: true,
});
expect(bazEntries).toHaveLength(1);
expect(bazEntries.map(e => e.name)).toContain('qux.ts');
const qux = await fs.readFile(join(destination, 'baz', 'qux.ts'));
expect(qux.toString('utf-8')).toBe('console.log("qux");\n');
const bar = await fs.readFile(join(destination, 'bar.txt'));
expect(bar.toString('utf-8')).toBe('bar\n');
const foo = await fs.readFile(join(destination, 'foo.json'));
expect(foo.toString('utf-8')).toBe(JSON.stringify({ foo: 2 }, null, 4) + '\n');
});
it("fails to unzip something that doesn't exist", async () => {
return expect(unzip('foobar', 'bazfoo')).rejects.toThrow();
});
});
describe('zip', () => {
it('zips objects', async () => {
const objects: Array<{ name: string; object: any }> = [
{
name: 'firstObject',
object: 2,
},
{
name: 'secondObject',
object: { baz: 'baz' },
},
{
name: 'thirdObject',
object: [2, { foo: 'foo' }, false],
},
];
const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`);
await zip({
destination: zipped,
objects,
paths: [],
// it('zips objects', async () => {
// const objects: Array<{ name: string; object: any }> = [
// {
// name: 'firstObject',
// object: 2,
// },
// {
// name: 'secondObject',
// object: { baz: 'baz' },
// },
// {
// name: 'thirdObject',
// object: [2, { foo: 'foo' }, false],
// },
// ];
// const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`);
// await zip({
// destination: zipped,
// objects,
// paths: [],
// });
// const unzipped = join(tmpdir(), `zip-test-${Date.now()}`);
// await unzip(zipped, unzipped);
// for (const obj of objects) {
// const read = await fsExtra
// .readFile(join(unzipped, obj.name))
// .then(read => JSON.parse(read.toString('utf-8')));
// expect(read).toEqual(obj.object);
// }
// });
// it('zips paths', async () => {
// const files = [
// join(__dirname, 'tests', 'resources', 'bar.txt'),
// join(__dirname, 'tests', 'resources', 'foo.json'),
// join(__dirname, 'tests', 'resources', 'baz'),
// ];
// const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`);
// await zip({ destination: zipped, objects: [], paths: files });
// const unzipped = join(tmpdir(), `zip-test-${Date.now()}`);
// await unzip(zipped, unzipped);
// const entries = await fs.readdir(unzipped, { withFileTypes: true });
// const bar = entries.find(e => e.name === 'bar.txt');
// const baz = entries.find(e => e.name === 'baz');
// const foo = entries.find(e => e.name === 'foo.json');
// expect(bar?.isFile()).toBeTruthy();
// expect(baz?.isDirectory()).toBeTruthy();
// expect(foo?.isFile()).toBeTruthy();
// });
it('should fail if there is an archiver error', async () => {
fsMock.createWriteStream.mockReturnValueOnce(new PassThrough() as any);
const promise = zip('source', 'destination');
// emit an error after a small delay
const mockError = new Error('test-error');
setTimeout(() => {
archiverMock.mockEmit('error', mockError);
}, 100);
await expect(promise).rejects.toEqual(mockError);
});
const unzipped = join(tmpdir(), `zip-test-${Date.now()}`);
await unzip(zipped, unzipped);
for (const obj of objects) {
const read = await fs
.readFile(join(unzipped, obj.name))
.then(read => JSON.parse(read.toString('utf-8')));
expect(read).toEqual(obj.object);
}
});
it('zips paths', async () => {
const files = [
join(__dirname, 'tests', 'resources', 'bar.txt'),
join(__dirname, 'tests', 'resources', 'foo.json'),
join(__dirname, 'tests', 'resources', 'baz'),
];
const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`);
await zip({ destination: zipped, objects: [], paths: files });
const unzipped = join(tmpdir(), `zip-test-${Date.now()}`);
await unzip(zipped, unzipped);
it('should fail if there is an archiver warning', async () => {
fsMock.createWriteStream.mockReturnValueOnce(new PassThrough() as any);
const entries = await fs.readdir(unzipped, { withFileTypes: true });
const promise = zip('source', 'destination');
const bar = entries.find(e => e.name === 'bar.txt');
const baz = entries.find(e => e.name === 'baz');
const foo = entries.find(e => e.name === 'foo.json');
// emit an error after a small delay
const mockError = new Error('test-warning');
setTimeout(() => {
archiverMock.mockEmit('warning', mockError);
}, 100);
expect(bar?.isFile()).toBeTruthy();
expect(baz?.isDirectory()).toBeTruthy();
expect(foo?.isFile()).toBeTruthy();
await expect(promise).rejects.toEqual(mockError);
});
});

src/utils/zip.ts (71)

@@ -1,7 +1,7 @@
import { error, warn } from 'electron-log';
import { error, info, warn } from 'electron-log';
import fs from 'fs';
import { pathExists } from 'fs-extra';
import { basename } from 'path';
import { createWriteStream, pathExists } from 'fs-extra';
import { join } from 'path';
import archiver from 'archiver';
import unzipper from 'unzipper';
@@ -30,54 +30,10 @@ export const unzip = (zip: string, destination: string): Promise<void> => {
});
};
interface ZipArgs {
/** The destination of the generated zip */
destination: string;
objects: Array<{
/** Object to serialize (with `JSON.stringify`) and store in the zip */
object: any;
/** Name of this object in the generated zip */
name: string;
}>;
/** Files or folders to include */
paths: string[];
}
/**
* Adds a raw string into the ZIP archive
*
* @param archive ZIP archive to add the file to
* @param content content to add into archive
* @param nameInArchive name of file in archive
*/
const addStringToZip = (
archive: archiver.Archiver,
content: string,
nameInArchive: string,
): void => {
archive.append(content, { name: nameInArchive });
return;
};
/**
* Add the given path to the archive. If it's a file we add it directly, if it is a directory
* we recurse over all the files within that directory
*
* @param archive ZIP archive to add the file to
* @param filePath file to add, absolute path
*/
const addFileOrDirectoryToZip = async (archive: archiver.Archiver, filePath: string) => {
const isDir = await fs.promises.lstat(filePath).then(res => res.isDirectory());
if (isDir) {
archive.directory(filePath, basename(filePath));
} else {
archive.file(filePath, { name: basename(filePath) });
}
};
export const zip = ({ destination, objects, paths }: ZipArgs): Promise<void> =>
export const zip = (source: string, destination: string): Promise<void> =>
new Promise(async (resolve, reject) => {
const output = fs.createWriteStream(destination);
info('zipping', source, 'to', destination);
const output = createWriteStream(destination);
const archive = archiver('zip');
// finished
@@ -95,13 +51,16 @@ export const zip = ({ destination, objects, paths }: ZipArgs): Promise<void> =>
// pipe all zipped data to the output
archive.pipe(output);
const pathPromises = paths.map(p => addFileOrDirectoryToZip(archive, p));
for (const obj of objects) {
addStringToZip(archive, JSON.stringify(obj.object), obj.name);
// avoid including the c-lightning RPC socket
const entryDataFunction: archiver.EntryDataFunction = entry => {
if (entry.name?.endsWith(join('lightningd', 'regtest', 'lightning-rpc'))) {
console.info('skipping', entry);
return false;
}
await Promise.all(pathPromises);
return entry;
};
// append files from a sub-directory, putting its contents at the root of archive
archive.directory(source, false, entryDataFunction);
// we've added all files, tell this to the archive so it can emit the 'close' event
// once all streams have finished
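
With this change, zip takes a source directory and a destination file instead of the old ZipArgs object. A minimal usage sketch (paths are made up):

// archive an entire network folder; the c-lightning lightning-rpc socket is skipped as above
await zip('/home/me/.polar/networks/1', '/tmp/my-network.zip');
// and the counterpart
await unzip('/tmp/my-network.zip', '/tmp/restored-network');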

tsconfig.json (3)

@@ -16,6 +16,5 @@
"jsx": "preserve",
"baseUrl": "src"
},
"include": ["src"],
"exclude": ["src/utils/tests/resources"]
"include": ["src"]
}
