Reinstate mapping index, add ActiveSlotId
Reinstate the index mapping for `activeSlots`, however with a modified data structure. The main difference comes from a change in `mySlots`: it now requires a `RequestId` as a parameter (which can be obtained using `myRequests`). This allows the `activeSlots` mapping to be keyed on `RequestId`, which gives a mapping of `ActiveSlotId => EnumerableSet.Bytes32`. `ActiveSlotId` is a keccak hash of `address + activeSlotIdx`, which not only allows retrieval of all active slot ids per request, but also allows clearing of all active slot ids for a request by incrementing the `activeSlotIdx`. ``` requestId => [ keccak(address + activeSlotIdx) => EnumerableSet.Bytes32 ] ```
This commit is contained in:
parent
5f056b055a
commit
a99827ed39
|
@ -14,6 +14,7 @@ contract Marketplace is Collateral, Proofs {
|
|||
|
||||
type RequestId is bytes32;
|
||||
type SlotId is bytes32;
|
||||
type ActiveSlotId is bytes32;
|
||||
|
||||
uint256 public immutable collateral;
|
||||
MarketplaceFunds private funds;
|
||||
|
@ -21,8 +22,8 @@ contract Marketplace is Collateral, Proofs {
|
|||
mapping(RequestId => RequestContext) private requestContexts;
|
||||
mapping(SlotId => Slot) private slots;
|
||||
mapping(address => EnumerableSet.Bytes32Set) private activeRequests;
|
||||
mapping(address => EnumerableSet.Bytes32Set) private activeSlots;
|
||||
mapping(address => uint8) private activeSlotsIdx;
|
||||
mapping(RequestId => mapping(ActiveSlotId => EnumerableSet.Bytes32Set)) private activeSlots;
|
||||
mapping(RequestId => uint8) private activeSlotsIdx;
|
||||
|
||||
constructor(
|
||||
IERC20 _token,
|
||||
|
@ -42,36 +43,39 @@ contract Marketplace is Collateral, Proofs {
|
|||
return _toRequestIds(activeRequests[msg.sender].values());
|
||||
}
|
||||
|
||||
function mySlots() public view returns (SlotId[] memory) {
|
||||
return _activeSlotsForHost(msg.sender);
|
||||
}
|
||||
|
||||
function _activeSlotsForHost(address host)
|
||||
internal
|
||||
function mySlots(RequestId requestId)
|
||||
public
|
||||
view
|
||||
returns (SlotId[] memory)
|
||||
{
|
||||
return _toSlotIds(activeSlots[host].values());
|
||||
EnumerableSet.Bytes32Set storage slotIds = _activeSlotsForHost(msg.sender,
|
||||
requestId);
|
||||
return _toSlotIds(slotIds.values());
|
||||
}
|
||||
|
||||
|
||||
function _activeSlotsForHost(address host, RequestId requestId)
|
||||
internal
|
||||
view
|
||||
returns (EnumerableSet.Bytes32Set storage)
|
||||
{
|
||||
mapping(ActiveSlotId => EnumerableSet.Bytes32Set) storage activeForReq =
|
||||
activeSlots[requestId];
|
||||
ActiveSlotId id = _toActiveSlotId(host, requestId);
|
||||
return activeForReq[id];
|
||||
}
|
||||
|
||||
/// @notice Clears active slots for a request
|
||||
/// @dev Because there are no efficient ways to clear an EnumerableSet, an index is updated that points to a new instance.
|
||||
/// @param requestId request for which to clear the active slots
|
||||
function _clearActiveSlots(RequestId requestId) internal {
|
||||
activeSlotsIdx[requestId]++;
|
||||
}
|
||||
|
||||
function _equals(RequestId a, RequestId b) internal pure returns (bool) {
|
||||
return RequestId.unwrap(a) == RequestId.unwrap(b);
|
||||
}
|
||||
|
||||
/// @notice Clears active slots for a host for a given request
|
||||
/// @dev WARNING: This could potentially run out of gas if the number of slots is too high. Calling .values() copies to memory.
|
||||
/// @param host address of the host for which to clear the active slots
|
||||
/// @param requestId identifies the request that the slot must belong to for it to be cleared
|
||||
function _clearActiveSlotsForRequest(address host, RequestId requestId) internal {
|
||||
SlotId[] memory slotIds = _activeSlotsForHost(host);
|
||||
for (uint8 i = 0; i < slotIds.length; i++) {
|
||||
RequestId slotRequestId = _getRequestIdForSlot(slotIds[i]);
|
||||
if (_equals(slotRequestId, requestId)) {
|
||||
activeSlots[host].remove(SlotId.unwrap(slotIds[i]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function requestStorage(Request calldata request)
|
||||
public
|
||||
marketplaceInvariant
|
||||
|
@ -123,7 +127,7 @@ contract Marketplace is Collateral, Proofs {
|
|||
slot.requestId = requestId;
|
||||
RequestContext storage context = _context(requestId);
|
||||
context.slotsFilled += 1;
|
||||
activeSlots[slot.host].add(SlotId.unwrap(slotId));
|
||||
_activeSlotsForHost(slot.host, requestId).add(SlotId.unwrap(slotId));
|
||||
emit SlotFilled(requestId, slotIndex, slotId);
|
||||
if (context.slotsFilled == request.ask.slots) {
|
||||
context.state = RequestState.Started;
|
||||
|
@ -150,8 +154,7 @@ contract Marketplace is Collateral, Proofs {
|
|||
|
||||
_unexpectProofs(_toProofId(slotId));
|
||||
|
||||
address slotHost = slot.host;
|
||||
activeSlots[slotHost].remove(SlotId.unwrap(slotId));
|
||||
_activeSlotsForHost(slot.host, requestId).remove(SlotId.unwrap(slotId));
|
||||
slot.host = address(0);
|
||||
slot.requestId = RequestId.wrap(0);
|
||||
context.slotsFilled -= 1;
|
||||
|
@ -167,7 +170,7 @@ contract Marketplace is Collateral, Proofs {
|
|||
_setProofEnd(_toEndId(requestId), block.timestamp - 1);
|
||||
context.endsAt = block.timestamp - 1;
|
||||
activeRequests[request.client].remove(RequestId.unwrap(requestId));
|
||||
_clearActiveSlotsForRequest(slotHost, requestId);
|
||||
_clearActiveSlots(requestId);
|
||||
emit RequestFailed(requestId);
|
||||
|
||||
// TODO: burn all remaining slot collateral (note: slot collateral not
|
||||
|
@ -188,7 +191,7 @@ contract Marketplace is Collateral, Proofs {
|
|||
SlotId slotId = _toSlotId(requestId, slotIndex);
|
||||
Slot storage slot = _slot(slotId);
|
||||
require(!slot.hostPaid, "Already paid");
|
||||
activeSlots[slot.host].remove(SlotId.unwrap(slotId));
|
||||
_activeSlotsForHost(slot.host, requestId).remove(SlotId.unwrap(slotId));
|
||||
uint256 amount = pricePerSlot(requests[requestId]);
|
||||
funds.sent += amount;
|
||||
funds.balance -= amount;
|
||||
|
@ -406,6 +409,17 @@ contract Marketplace is Collateral, Proofs {
|
|||
return SlotId.wrap(keccak256(abi.encode(requestId, slotIndex)));
|
||||
}
|
||||
|
||||
function _toActiveSlotId(address host, RequestId requestId)
|
||||
internal
|
||||
view
|
||||
returns (ActiveSlotId)
|
||||
{
|
||||
uint8 activeSlotIdx = activeSlotsIdx[requestId];
|
||||
return ActiveSlotId.wrap(
|
||||
keccak256(abi.encode(host, activeSlotIdx))
|
||||
);
|
||||
}
|
||||
|
||||
function _toLockId(RequestId requestId) internal pure returns (LockId) {
|
||||
return LockId.wrap(RequestId.unwrap(requestId));
|
||||
}
|
||||
|
|
|
@ -746,7 +746,7 @@ describe("Marketplace", function () {
|
|||
await marketplace.fillSlot(slot.request, slot.index, proof)
|
||||
let slot1 = { ...slot, index: slot.index + 1 }
|
||||
await marketplace.fillSlot(slot.request, slot1.index, proof)
|
||||
expect(await marketplace.mySlots()).to.deep.equal([
|
||||
expect(await marketplace.mySlots(slot.request)).to.deep.equal([
|
||||
slotId(slot),
|
||||
slotId(slot1),
|
||||
])
|
||||
|
@ -757,10 +757,34 @@ describe("Marketplace", function () {
|
|||
let slot1 = { ...slot, index: slot.index + 1 }
|
||||
await marketplace.fillSlot(slot.request, slot1.index, proof)
|
||||
await marketplace.freeSlot(slotId(slot))
|
||||
expect(await marketplace.mySlots()).to.deep.equal([slotId(slot1)])
|
||||
expect(await marketplace.mySlots(slot.request)).to.deep.equal([
|
||||
slotId(slot1),
|
||||
])
|
||||
})
|
||||
|
||||
it("removes all request's active slots when request fails", async function () {
|
||||
it("removes active slots for all hosts in a request when it fails", async function () {
|
||||
let halfOfSlots = request.ask.slots / 2
|
||||
|
||||
// fill half the slots with host1
|
||||
for (let i = 0; i < Math.floor(halfOfSlots); i++) {
|
||||
await marketplace.fillSlot(requestId(request), i, proof)
|
||||
}
|
||||
|
||||
// fill other half of slots with host2
|
||||
switchAccount(host2)
|
||||
await token.approve(marketplace.address, collateral)
|
||||
await marketplace.deposit(collateral)
|
||||
for (let i = Math.floor(halfOfSlots); i < request.ask.slots; i++) {
|
||||
await marketplace.fillSlot(requestId(request), i, proof)
|
||||
}
|
||||
|
||||
await waitUntilFailed(marketplace, request, slot)
|
||||
expect(await marketplace.mySlots(slot.request)).to.deep.equal([])
|
||||
switchAccount(host)
|
||||
expect(await marketplace.mySlots(slot.request)).to.deep.equal([])
|
||||
})
|
||||
|
||||
it("doesn't remove active slots for hosts in request that didn't fail", async function () {
|
||||
// start first request
|
||||
await waitUntilStarted(marketplace, request, proof)
|
||||
|
||||
|
@ -784,14 +808,19 @@ describe("Marketplace", function () {
|
|||
let id = slotId(expectedSlot)
|
||||
expected.push(id)
|
||||
}
|
||||
expect(await marketplace.mySlots()).to.deep.equal(expected.reverse())
|
||||
expect(await marketplace.mySlots(slot.request)).to.deep.equal([])
|
||||
expect(await marketplace.mySlots(expectedSlot.request)).to.deep.equal(
|
||||
expected
|
||||
)
|
||||
})
|
||||
|
||||
it("removes slots from list when request finishes", async function () {
|
||||
await waitUntilStarted(marketplace, request, proof)
|
||||
await waitUntilFinished(marketplace, requestId(request))
|
||||
await marketplace.payoutSlot(slot.request, slot.index)
|
||||
expect(await marketplace.mySlots()).to.not.contain(slotId(slot))
|
||||
expect(await marketplace.mySlots(slot.request)).to.not.contain(
|
||||
slotId(slot)
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
Loading…
Reference in New Issue