
Merge #273 #282

273: Build for aarch64 which is needed for RPi4 r=bonomat a=bonomat

Adds more build targets and publishes Docker containers.


This was tested on my personal fork and resulted in the following container, which runs on an RPi4:

```
docker run ghcr.io/bonomat/hermes-maker-aarch64-unknown-linux-gnu:latest
```

A few notes on points that might come up:
- The naming of the containers could be improved; someone with `sed` knowledge, please give it a try :)
- I was not able to use the GitHub action `docker/build-push-action`: it moves the image build into a tmp folder which I was not able to escape, so I could not copy the pre-built binaries into the image.
- I was not able to combine the per-architecture containers into a single multi-arch image because the builds run as parallel jobs. Apparently this should be possible via a [manifest](https://www.docker.com/blog/multi-platform-docker-builds/); see the sketch after this list.
- I was also not able to use Alpine as the base image because of the libc mismatch (the binaries link against glibc/libc6, while Alpine ships musl).
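As a rough sketch of the manifest approach mentioned above (not something this PR does), the per-architecture images that the workflow already pushes could later be stitched into one multi-arch tag with `docker manifest`. The `OWNER` placeholder and the combined `hermes-maker:latest` tag are hypothetical; only the per-target image names follow the naming scheme used in the workflow:

```
# Hypothetical follow-up: combine the per-arch images into a single multi-arch tag.
# Requires the per-arch images to have been pushed already.
docker manifest create ghcr.io/OWNER/hermes-maker:latest \
  ghcr.io/OWNER/hermes-maker-x86_64-unknown-linux-gnu:latest \
  ghcr.io/OWNER/hermes-maker-armv7-unknown-linux-gnueabihf:latest \
  ghcr.io/OWNER/hermes-maker-aarch64-unknown-linux-gnu:latest

docker manifest push ghcr.io/OWNER/hermes-maker:latest
```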

282: Only fetch announcements when needed r=luckysori a=luckysori

Fixes #275.

I think this is what you meant in #275, `@thomaseizinger`. It does mean immediately deleting some code you wrote in #279. I do think we'll end up reintroducing it once we actually use more than one oracle event per CFD, but it seems unnecessary to keep it around until then. Let me know if you disagree.

Co-authored-by: Philipp Hoenisch <philipp@hoenisch.at>
Co-authored-by: Lucas Soriano del Pino <l.soriano.del.pino@gmail.com>
bors[bot] authored 3 years ago, committed via GitHub · commit 8fa714345f
11 changed files (with the number of changed lines):

1. .cargo/config.toml (2)
2. .dockerignore (3)
3. .github/workflows/build-release-binary.yml (53)
4. .github/workflows/ci.yml (20)
5. Dockerfile (15)
6. bors.toml (3)
7. daemon/src/maker_cfd.rs (10)
8. daemon/src/oracle.rs (160)
9. daemon/src/setup_contract.rs (4)
10. daemon/src/taker_cfd.rs (10)
11. rust-toolchain.toml (2)

.cargo/config.toml (2 changed lines)

```diff
@@ -1,2 +1,4 @@
 [target.armv7-unknown-linux-gnueabihf]
 linker = "arm-linux-gnueabihf-gcc"
+[target.aarch64-unknown-linux-gnu]
+linker = "aarch64-linux-gnu-gcc"
```

.dockerignore (3 changed lines)

```diff
@@ -0,0 +1,3 @@
+.*
+!./target
```

.github/workflows/build-release-binary.yml (53 changed lines)

```diff
@@ -14,34 +14,52 @@ jobs:
             target: x86_64-unknown-linux-gnu
             os: ubuntu-latest
             archive_ext: tar
+            docker_platforms: linux/amd64
           - bin: taker
             target: armv7-unknown-linux-gnueabihf
             os: ubuntu-latest
             archive_ext: tar
+            docker_platforms: linux/arm/v7
+          - bin: taker
+            target: aarch64-unknown-linux-gnu
+            os: ubuntu-latest
+            archive_ext: tar
+            docker_platforms: linux/arm64
           - bin: taker
             target: x86_64-apple-darwin
             os: macos-latest
             archive_ext: tar
+            docker_platforms: None
           - bin: taker
             target: x86_64-pc-windows-msvc
             os: windows-latest
             archive_ext: zip
+            docker_platforms: None
           - bin: maker
             target: x86_64-unknown-linux-gnu
             os: ubuntu-latest
             archive_ext: tar
+            docker_platforms: linux/amd64
           - bin: maker
             target: armv7-unknown-linux-gnueabihf
             os: ubuntu-latest
             archive_ext: tar
+            docker_platforms: linux/arm64
+          - bin: maker
+            target: aarch64-unknown-linux-gnu
+            os: ubuntu-latest
+            archive_ext: tar
+            docker_platforms: linux/arm64
           - bin: maker
             target: x86_64-apple-darwin
             os: macos-latest
             archive_ext: tar
+            docker_platforms: None
           - bin: maker
             target: x86_64-pc-windows-msvc
             os: windows-latest
             archive_ext: zip
+            docker_platforms: None
     runs-on: ${{ matrix.os }}
     steps:
       - name: Checkout tagged commit
@@ -63,6 +81,12 @@ jobs:
           sudo apt-get update
           sudo apt-get install gcc-arm-linux-gnueabihf
+      - name: Install compiler for aarch64 arch (armv8)
+        if: matrix.target == 'aarch64-unknown-linux-gnu'
+        run: |
+          sudo apt-get update
+          sudo apt-get install 'gcc-aarch64-linux-gnu'
      - uses: actions/setup-node@v2
        with:
          node-version: '16'
@@ -80,7 +104,8 @@ jobs:
         run: cargo build --target=${{ matrix.target }} --release --bin ${{ matrix.bin }}
       - name: Smoke test the binary
-        if: matrix.target != 'armv7-unknown-linux-gnueabihf' # armv7-unknown-linux-gnueabihf is only cross-compiled, no smoke test
+        # armv7-* and aarch64-* is only cross-compiled, no smoke test
+        if: matrix.target != 'armv7-unknown-linux-gnueabihf' && matrix.target != 'aarch64-unknown-linux-gnu'
         run: target/${{ matrix.target }}/release/${{ matrix.bin }} --help
       # Remove once python 3 is the default
@@ -129,3 +154,29 @@ jobs:
           asset_path: ./${{ steps.create-archive-name.outputs.archive }}
           asset_name: ${{ steps.create-archive-name.outputs.archive }}
           asset_content_type: application/gzip
+      - name: Set up QEMU
+        if: matrix.os != 'windows-latest' && matrix.os != 'macos-latest'
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        if: matrix.os != 'windows-latest' && matrix.os != 'macos-latest'
+        uses: docker/setup-buildx-action@v1
+      - name: Login into github registry
+        uses: docker/login-action@v1.10.0
+        if: matrix.os != 'windows-latest' && matrix.os != 'macos-latest'
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and publish docker image
+        if: matrix.os != 'windows-latest' && matrix.os != 'macos-latest'
+        run: |
+          docker buildx build \
+            --push \
+            --platform=${{ matrix.docker_platforms }} \
+            --tag ghcr.io/${{ github.actor }}/hermes-${{ matrix.bin }}-${{ matrix.target }} \
+            --build-arg BINARY_PATH=target/${{ matrix.target }}/release/${{ matrix.bin }} \
+            .
```

.github/workflows/ci.yml (20 changed lines)

```diff
@@ -94,20 +94,32 @@ jobs:
   daemons_arm_build:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - target: armv7-unknown-linux-gnueabihf
+          - target: aarch64-unknown-linux-gnu
     steps:
       - uses: actions/checkout@v2
       - name: Setup rust toolchain
         run: rustup show
       - uses: Swatinem/rust-cache@v1.3.0
       - name: Install compiler for armhf arch
+        if: matrix.target == 'armv7-unknown-linux-gnueabihf'
         run: |
           sudo apt-get update
           sudo apt-get install gcc-arm-linux-gnueabihf
-      - run: cargo build --target=armv7-unknown-linux-gnueabihf --bins
+      - name: Install compiler for aarch64 arch
+        if: matrix.target == 'aarch64-unknown-linux-gnu'
+        run: |
+          sudo apt-get update
+          sudo apt-get install gcc-aarch64-linux-gnu
+      - run: cargo build --target=${{ matrix.target }} --bins
       - name: Upload binaries
         uses: actions/upload-artifact@v2
         with:
-          name: maker-and-taker-binaries-armv7
+          name: maker-and-taker-binaries-${{ matrix.target }}
           path: |
-            target/armv7-unknown-linux-gnueabihf/debug/maker
-            target/armv7-unknown-linux-gnueabihf/debug/taker
+            target/${{ matrix.target }}/debug/maker
+            target/${{ matrix.target }}/debug/taker
```

Dockerfile (15 changed lines)

```diff
@@ -0,0 +1,15 @@
+FROM debian:bullseye-slim
+ARG BINARY_PATH
+RUN echo "Copying $BINARY_PATH into container"
+COPY $BINARY_PATH hermes
+RUN chmod a+x hermes
+VOLUME data
+# HTTP Port and P2P Port
+EXPOSE 8000 9999
+ENTRYPOINT ["/hermes", "--data-dir=/data", "--http-address=0.0.0.0:8000"]
```
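For reference, given the `VOLUME`, `EXPOSE` and `ENTRYPOINT` lines above, running the published image with persistent data and the ports mapped might look like the following; the image name comes from the example at the top of this PR, while the host path and port mappings are placeholders:

```
# 8000 is the HTTP port, 9999 the P2P port; /data holds the daemon's state.
docker run \
  -p 8000:8000 \
  -p 9999:9999 \
  -v "$PWD/hermes-data":/data \
  ghcr.io/bonomat/hermes-maker-aarch64-unknown-linux-gnu:latest
```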

bors.toml (3 changed lines)

```diff
@@ -7,5 +7,6 @@ status = [
   "test_daemons (ubuntu-latest)",
   "test_daemons (macos-latest)",
   "test_daemons (windows-latest)",
-  "daemons_arm_build",
+  "daemons_arm_build (armv7-unknown-linux-gnueabihf)",
+  "daemons_arm_build (aarch64-unknown-linux-gnu)",
 ]
```

daemon/src/maker_cfd.rs (10 changed lines)

```diff
@@ -199,6 +199,10 @@ impl Actor {
         let oracle_event_id =
             oracle::next_announcement_after(time::OffsetDateTime::now_utc() + Order::TERM)?;

+        self.oracle_actor
+            .do_send_async(oracle::FetchAnnouncement(oracle_event_id.clone()))
+            .await?;
+
         let order = Order::new(
             price,
             min_quantity,
@@ -587,7 +591,7 @@ impl Actor {
             .with_context(|| format!("Announcement {} not found", cfd.order.oracle_event_id))?;

         self.oracle_actor
-            .do_send_async(oracle::MonitorEvent {
+            .do_send_async(oracle::MonitorAttestation {
                 event_id: offer_announcement.id.clone(),
             })
             .await?;
@@ -600,7 +604,7 @@ impl Actor {
                 })
             }),
             receiver,
-            (self.oracle_pk, offer_announcement.clone().into()),
+            (self.oracle_pk, offer_announcement),
             cfd,
             self.wallet.clone(),
             Role::Maker,
@@ -778,7 +782,7 @@ impl Actor {
             .await?;

         self.oracle_actor
-            .do_send_async(oracle::MonitorEvent {
+            .do_send_async(oracle::MonitorAttestation {
                 event_id: announcement.id.clone(),
             })
             .await?;
```

daemon/src/oracle.rs (160 changed lines)

```diff
@@ -17,7 +17,8 @@ const OLIVIA_EVENT_TIME_FORMAT: &[FormatItem] =
     format_description!("[year]-[month]-[day]T[hour]:[minute]:[second]");

 pub struct Actor<CFD, M> {
-    latest_announcements: HashMap<OracleEventId, Announcement>,
+    announcements: HashMap<OracleEventId, (OffsetDateTime, Vec<schnorrsig::PublicKey>)>,
+    pending_announcements: HashSet<OracleEventId>,
     pending_attestations: HashSet<OracleEventId>,
     cfd_actor_address: xtra::Address<CFD>,
     monitor_actor_address: xtra::Address<M>,
@@ -25,10 +26,23 @@ pub struct Actor<CFD, M> {

 pub struct Sync;

-pub struct MonitorEvent {
+/// Message used to tell the `oracle::Actor` to fetch an
+/// `Announcement` from `olivia`.
+///
+/// The `Announcement` corresponds to the `OracleEventId` included in
+/// the message.
+#[derive(Debug, Clone)]
+pub struct FetchAnnouncement(pub OracleEventId);
+
+pub struct MonitorAttestation {
     pub event_id: OracleEventId,
 }

+/// Message used to request the `Announcement` from the
+/// `oracle::Actor`'s local state.
+///
+/// The `Announcement` corresponds to the `OracleEventId` included in
+/// the message.
 #[derive(Debug, Clone)]
 pub struct GetAnnouncement(pub OracleEventId);
@@ -45,7 +59,8 @@ pub struct Attestation {
 #[derive(Debug)]
 struct NewAnnouncementFetched {
     id: OracleEventId,
-    announcement: Announcement,
+    expected_outcome_time: OffsetDateTime,
+    nonce_pks: Vec<schnorrsig::PublicKey>,
 }

 impl<CFD, M> Actor<CFD, M> {
@@ -82,7 +97,8 @@ impl<CFD, M> Actor<CFD, M> {
         }

         Self {
-            latest_announcements: HashMap::new(),
+            announcements: HashMap::new(),
+            pending_announcements: HashSet::new(),
             pending_attestations,
             cfd_actor_address,
             monitor_actor_address,
@@ -92,32 +108,17 @@
 impl<CFD, M> Actor<CFD, M>
 where
-    CFD: xtra::Handler<Attestation>,
-    M: xtra::Handler<Attestation>,
+    CFD: 'static,
+    M: 'static,
 {
-    async fn update_state(&mut self, ctx: &mut xtra::Context<Self>) -> Result<()> {
-        self.update_latest_announcements(ctx)
-            .await
-            .context("failed to update announcements")?;
-        self.update_pending_attestations()
-            .await
-            .context("failed to update pending attestations")?;
-        Ok(())
-    }
-
-    async fn update_latest_announcements(&mut self, ctx: &mut xtra::Context<Self>) -> Result<()> {
-        for event_id in next_ids()? {
-            if self.latest_announcements.contains_key(&event_id) {
-                continue;
-            }
-
-            let this = ctx.address().expect("self to be alive");
+    async fn update_pending_announcements(&mut self, ctx: &mut xtra::Context<Self>) -> Result<()> {
+        let this = ctx.address().expect("self to be alive");
+        for event_id in self.pending_announcements.iter().cloned() {
+            let this = this.clone();
             tokio::spawn(async move {
                 let url = event_id.to_olivia_url();

-                tracing::debug!("Fetching attestation for {}", event_id);
+                tracing::debug!("Fetching announcement for {}", event_id);

                 let response = reqwest::get(url.clone())
                     .await
@@ -134,7 +135,8 @@ where
                 this.send(NewAnnouncementFetched {
                     id: event_id,
-                    announcement,
+                    nonce_pks: announcement.nonce_pks,
+                    expected_outcome_time: announcement.expected_outcome_time,
                 })
                 .await?;
@@ -144,6 +146,23 @@
         Ok(())
     }
+}
+
+impl<CFD, M> Actor<CFD, M>
+where
+    CFD: xtra::Handler<Attestation>,
+    M: xtra::Handler<Attestation>,
+{
+    async fn update_state(&mut self, ctx: &mut xtra::Context<Self>) -> Result<()> {
+        self.update_pending_announcements(ctx)
+            .await
+            .context("failed to update pending announcements")?;
+
+        self.update_pending_attestations()
+            .await
+            .context("failed to update pending attestations")?;
+
+        Ok(())
+    }

     async fn update_pending_attestations(&mut self) -> Result<()> {
         let pending_attestations = self.pending_attestations.clone();
@@ -195,10 +214,19 @@ where
 }

 #[async_trait]
-impl<CFD: 'static, M: 'static> xtra::Handler<MonitorEvent> for Actor<CFD, M> {
-    async fn handle(&mut self, msg: MonitorEvent, _ctx: &mut xtra::Context<Self>) {
+impl<CFD: 'static, M: 'static> xtra::Handler<MonitorAttestation> for Actor<CFD, M> {
+    async fn handle(&mut self, msg: MonitorAttestation, _ctx: &mut xtra::Context<Self>) {
         if !self.pending_attestations.insert(msg.event_id.clone()) {
-            tracing::trace!("Event {} already being monitored", msg.event_id);
+            tracing::trace!("Attestation {} already being monitored", msg.event_id);
+        }
+    }
+}
+
+#[async_trait]
+impl<CFD: 'static, M: 'static> xtra::Handler<FetchAnnouncement> for Actor<CFD, M> {
+    async fn handle(&mut self, msg: FetchAnnouncement, _ctx: &mut xtra::Context<Self>) {
+        if !self.pending_announcements.insert(msg.0.clone()) {
+            tracing::trace!("Announcement {} already being fetched", msg.0);
         }
     }
 }
@@ -210,35 +238,25 @@ impl<CFD: 'static, M: 'static> xtra::Handler<GetAnnouncement> for Actor<CFD, M>
         msg: GetAnnouncement,
         _ctx: &mut xtra::Context<Self>,
     ) -> Option<Announcement> {
-        self.latest_announcements.get(&msg.0).cloned()
+        self.announcements
+            .get_key_value(&msg.0)
+            .map(|(id, (time, nonce_pks))| Announcement {
+                id: id.clone(),
+                expected_outcome_time: *time,
+                nonce_pks: nonce_pks.clone(),
+            })
     }
 }

 #[async_trait]
 impl<CFD: 'static, M: 'static> xtra::Handler<NewAnnouncementFetched> for Actor<CFD, M> {
     async fn handle(&mut self, msg: NewAnnouncementFetched, _ctx: &mut xtra::Context<Self>) {
-        self.latest_announcements.insert(msg.id, msg.announcement);
+        self.pending_announcements.remove(&msg.id);
+        self.announcements
+            .insert(msg.id, (msg.expected_outcome_time, msg.nonce_pks));
     }
 }

-/// Construct the URL of the next 24 `BitMEX/BXBT` hourly events
-/// `olivia` will attest to.
-fn next_ids() -> Result<Vec<OracleEventId>> {
-    let ids = next_24_hours(OffsetDateTime::now_utc())?
-        .into_iter()
-        .map(event_id)
-        .collect();
-
-    Ok(ids)
-}
-
-fn next_24_hours(datetime: OffsetDateTime) -> Result<Vec<OffsetDateTime>> {
-    let adjusted = ceil_to_next_hour(datetime)?;
-    let timestamps = (0..=24).map(|i| adjusted + i.hours()).collect();
-
-    Ok(timestamps)
-}
-
 #[allow(dead_code)]
 pub fn next_announcement_after(timestamp: OffsetDateTime) -> Result<OracleEventId> {
     let adjusted = ceil_to_next_hour(timestamp)?;
@@ -302,7 +320,11 @@ where
 impl xtra::Message for Sync {
     type Result = ();
 }

-impl xtra::Message for MonitorEvent {
+impl xtra::Message for MonitorAttestation {
+    type Result = ();
+}
+
+impl xtra::Message for FetchAnnouncement {
     type Result = ();
 }
@@ -613,40 +635,4 @@ mod tests {
         assert_eq!(event_id.0, "/x/BitMEX/BXBT/2021-09-24T00:00:00.price?n=20");
     }
-
-    #[test]
-    fn next_24() {
-        let datetime = datetime!(2021-09-23 10:43:12);
-        let next_24_hours = next_24_hours(datetime.assume_utc()).unwrap();
-
-        let expected = vec![
-            datetime!(2021-09-23 11:00:00).assume_utc(),
-            datetime!(2021-09-23 12:00:00).assume_utc(),
-            datetime!(2021-09-23 13:00:00).assume_utc(),
-            datetime!(2021-09-23 14:00:00).assume_utc(),
-            datetime!(2021-09-23 15:00:00).assume_utc(),
-            datetime!(2021-09-23 16:00:00).assume_utc(),
-            datetime!(2021-09-23 17:00:00).assume_utc(),
-            datetime!(2021-09-23 18:00:00).assume_utc(),
-            datetime!(2021-09-23 19:00:00).assume_utc(),
-            datetime!(2021-09-23 20:00:00).assume_utc(),
-            datetime!(2021-09-23 21:00:00).assume_utc(),
-            datetime!(2021-09-23 22:00:00).assume_utc(),
-            datetime!(2021-09-23 23:00:00).assume_utc(),
-            datetime!(2021-09-24 00:00:00).assume_utc(),
-            datetime!(2021-09-24 01:00:00).assume_utc(),
-            datetime!(2021-09-24 02:00:00).assume_utc(),
-            datetime!(2021-09-24 03:00:00).assume_utc(),
-            datetime!(2021-09-24 04:00:00).assume_utc(),
-            datetime!(2021-09-24 05:00:00).assume_utc(),
-            datetime!(2021-09-24 06:00:00).assume_utc(),
-            datetime!(2021-09-24 07:00:00).assume_utc(),
-            datetime!(2021-09-24 08:00:00).assume_utc(),
-            datetime!(2021-09-24 09:00:00).assume_utc(),
-            datetime!(2021-09-24 10:00:00).assume_utc(),
-            datetime!(2021-09-24 11:00:00).assume_utc(),
-        ];
-
-        assert_eq!(next_24_hours, expected)
-    }
 }
```

daemon/src/setup_contract.rs (4 changed lines)

```diff
@@ -32,7 +32,7 @@ use std::ops::RangeInclusive;
 pub async fn new(
     mut sink: impl Sink<SetupMsg, Error = anyhow::Error> + Unpin,
     mut stream: impl FusedStream<Item = SetupMsg> + Unpin,
-    (oracle_pk, announcement): (schnorrsig::PublicKey, Announcement),
+    (oracle_pk, announcement): (schnorrsig::PublicKey, oracle::Announcement),
     cfd: Cfd,
     wallet: Wallet,
     role: Role,
@@ -74,7 +74,7 @@ pub async fn new(
     }

     let payouts = HashMap::from_iter([(
-        announcement.clone(),
+        announcement.into(),
         payout_curve::calculate(cfd.order.price, cfd.quantity_usd, cfd.order.leverage)?,
     )]);
```

daemon/src/taker_cfd.rs (10 changed lines)

```diff
@@ -243,6 +243,10 @@ impl Actor {
             Some(mut order) => {
                 order.origin = Origin::Theirs;

+                self.oracle_actor
+                    .do_send_async(oracle::FetchAnnouncement(order.oracle_event_id.clone()))
+                    .await?;
+
                 let mut conn = self.db.acquire().await?;
                 insert_order(&order, &mut conn).await?;
                 self.order_feed_actor_inbox.send(Some(order))?;
@@ -290,7 +294,7 @@ impl Actor {
             .with_context(|| format!("Announcement {} not found", cfd.order.oracle_event_id))?;

         self.oracle_actor
-            .do_send_async(oracle::MonitorEvent {
+            .do_send_async(oracle::MonitorAttestation {
                 event_id: offer_announcement.id.clone(),
             })
             .await?;
@@ -301,7 +305,7 @@ impl Actor {
                 .into_sink()
                 .with(|msg| future::ok(wire::TakerToMaker::Protocol(msg))),
             receiver,
-            (self.oracle_pk, offer_announcement.clone().into()),
+            (self.oracle_pk, offer_announcement),
             cfd,
             self.wallet.clone(),
             Role::Taker,
@@ -399,7 +403,7 @@ impl Actor {
             .with_context(|| format!("Announcement {} not found", oracle_event_id))?;

         self.oracle_actor
-            .do_send_async(oracle::MonitorEvent {
+            .do_send_async(oracle::MonitorAttestation {
                 event_id: announcement.id.clone(),
             })
             .await?;
```

rust-toolchain.toml (2 changed lines)

```diff
@@ -1,4 +1,4 @@
 [toolchain]
 channel = "1.55"
 components = ["clippy"]
-targets = ["armv7-unknown-linux-gnueabihf"]
+targets = ["armv7-unknown-linux-gnueabihf", "aarch64-unknown-linux-gnu"]
```
