deliveryLayerNeighbors -> contentDeliveryLayerNeighbors
teogeb committed Apr 22, 2024
1 parent 7600bcf commit ae475b1
Showing 4 changed files with 7 additions and 7 deletions.
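
For orientation, here is a minimal sketch (not part of the commit) of the per-stream-partition info shape after the rename. The interface name and the placeholder PeerDescriptor type are assumptions; only the field names come from the diff below.

// Minimal sketch only, assuming the shape implied by the diff below.
// The interface name and the placeholder PeerDescriptor type are hypothetical.
type PeerDescriptor = { nodeId: Uint8Array }         // stand-in for the real DHT peer descriptor

interface StreamPartitionInfo {
    id: string                                       // StreamPartID in string form
    controlLayerNeighbors: PeerDescriptor[]          // unchanged by this commit
    contentDeliveryLayerNeighbors: PeerDescriptor[]  // renamed from deliveryLayerNeighbors
}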
4 changes: 2 additions & 2 deletions src/crawler/Crawler.ts
@@ -58,7 +58,7 @@ const createNodeInfoLogOutput = (nodeInfo: NodeInfo) => {
         streamPartitions: nodeInfo.streamPartitions.map((sp: any) => ({
             id: sp.id,
             controlLayerNeighbors: sp.controlLayerNeighbors.map((n: PeerDescriptor) => getNodeIdFromPeerDescriptor(n)),
-            deliveryLayerNeighbors: sp.deliveryLayerNeighbors.map((n: PeerDescriptor) => getNodeIdFromPeerDescriptor(n))
+            contentDeliveryLayerNeighbors: sp.contentDeliveryLayerNeighbors.map((n: PeerDescriptor) => getNodeIdFromPeerDescriptor(n))
         })),
         version: nodeInfo.version
     }
@@ -275,7 +275,7 @@ export class Crawler {
                 const streamPartitions = nodeInfo.streamPartitions.filter(
                     (sp) => StreamPartIDUtils.getStreamID(sp.id as StreamPartID) === payload.streamId
                 )
-                return (streamPartitions.map((sp) => sp.deliveryLayerNeighbors)).flat()
+                return (streamPartitions.map((sp) => sp.contentDeliveryLayerNeighbors)).flat()
             }, `stream-${payload.streamId}-${Date.now()}`)
             // TODO could add new nodes and neighbors to NodeRepository?
             await this.analyzeStream(payload.streamId, payload.metadata, topology, this.subscribeGate!)
2 changes: 1 addition & 1 deletion src/crawler/Topology.ts
@@ -19,7 +19,7 @@ export class Topology {
         for (const info of infos) {
             const streamPartNeighbors: Multimap<StreamPartID, DhtAddress> = new Multimap()
             for (const streamPartitionInfo of info.streamPartitions) {
-                const neighbors = streamPartitionInfo.deliveryLayerNeighbors
+                const neighbors = streamPartitionInfo.contentDeliveryLayerNeighbors
                     .map((n) => getNodeIdFromPeerDescriptor(n))
                     .filter((id) => nodeIds.has(id))
                 streamPartNeighbors.addAll(StreamPartIDUtils.parse(streamPartitionInfo.id), neighbors)
4 changes: 2 additions & 2 deletions test/Crawler.test.ts
@@ -21,7 +21,7 @@ describe('Crawler', () => {
             streamPartitions: [{
                 id: STREAM_PART_ID,
                 controlLayerNeighbors: [],
-                deliveryLayerNeighbors: neighbors.get(getNodeIdFromPeerDescriptor(peerDescriptor)) ?? []
+                contentDeliveryLayerNeighbors: neighbors.get(getNodeIdFromPeerDescriptor(peerDescriptor)) ?? []
             }],
             version: ''
         }
@@ -58,7 +58,7 @@ describe('Crawler', () => {
         const topology = await crawlTopology(
             localNode as any,
             [nodes[0], nodes[5]],
-            (response: NodeInfo) => response.streamPartitions[0].deliveryLayerNeighbors,
+            (response: NodeInfo) => response.streamPartitions[0].contentDeliveryLayerNeighbors,
             ''
         )
         expect(localNode.fetchNodeInfo).toHaveBeenCalledTimes(nodes.length)
4 changes: 2 additions & 2 deletions test/Topology.test.ts
@@ -16,13 +16,13 @@ describe('Topology', () => {
             peerDescriptor: nodes[0],
             streamPartitions: [{
                 id: STREAM_PART_ID_1,
-                deliveryLayerNeighbors: [nodes[1], nodes[2]]
+                contentDeliveryLayerNeighbors: [nodes[1], nodes[2]]
             }]
         }, {
             peerDescriptor: nodes[2],
             streamPartitions: [{
                 id: STREAM_PART_ID_2,
-                deliveryLayerNeighbors: [nodes[0], nodes[1], nodes[2]]
+                contentDeliveryLayerNeighbors: [nodes[0], nodes[1], nodes[2]]
             }]
         }] as any)
         expect([...topology.getNeighbors(getNodeIdFromPeerDescriptor(nodes[0]), STREAM_PART_ID_1)]).toIncludeSameMembers([
