compute references for batch post
parent a81e637907 · commit 3023408e04
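At a glance, this commit replaces the batch worker's computeAuthorWeights with computeBatchPost, which aggregates reference weights alongside author weights and carries the references through rollup submission, the Matrix event, and the validation-pool decider. As a reading aid, a minimal sketch of the value the new function returns, with values taken from the 'post with references' test in this diff:

// Illustrative only: shape returned by the new computeBatchPost.
// Author weights sum to 1000000 PPM; reference weights sum to WEIGHT_TO_REFERENCES (300000 PPM).
const exampleResult = {
  authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }],
  references: [{ targetPostId: 'a', weightPPM: 300000 }],
};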
@@ -1,35 +0,0 @@
-const Promise = require('bluebird');
-const read = require('../../../util/forum/read');
-const { matrixPools } = require('../../../util/db');
-
-const computeAuthorWeights = async (batchItems_) => {
-  const weights = {};
-  await Promise.each(batchItems_, async (postId) => {
-    const post = await read(postId);
-    const matrixPool = await matrixPools.get(postId);
-    const { fee, result: { votePasses, quorumMet } } = matrixPool;
-    post.authors.forEach(({ authorAddress, weightPPM }) => {
-      if (votePasses && quorumMet) {
-        weights[authorAddress] = weights[authorAddress] ?? 0;
-        // scale by matrix pool outcome and strength
-        weights[authorAddress] += weightPPM * fee;
-      }
-      // TODO: Rewards for policing
-      // TODO: Propagation via references
-    });
-  });
-  // Rescale author weights so they sum to 1000000
-  const sumOfWeights = Object.values(weights).reduce((t, v) => t + v, 0);
-  if (!sumOfWeights) {
-    return [];
-  }
-  const scaledWeights = Object.values(weights)
-    .map((weight) => Math.floor((weight * 1000000) / sumOfWeights));
-  const sumOfScaledWeights = scaledWeights.reduce((t, v) => t + v, 0);
-  scaledWeights[0] += 1000000 - sumOfScaledWeights;
-  const authors = Object.keys(weights)
-    .map((authorAddress, i) => ({ authorAddress, weightPPM: scaledWeights[i] }));
-  return authors;
-};
-
-module.exports = computeAuthorWeights;
@@ -1,33 +0,0 @@
-// const { expect } = require('chai');
-const assert = require('assert');
-const proxyquire = require('proxyquire');
-
-let posts = {};
-let pools = {};
-const read = (postId) => posts[postId];
-const matrixPools = {
-  get: (postId) => pools[postId],
-};
-
-const computeAuthorWeights = proxyquire('./compute-author-weights', {
-  '../../util/forum/read': read,
-  '../../util/db': { matrixPools },
-});
-
-describe('computeAuthorWeights', () => {
-  it('computes authorship for multiple posts by one author', async () => {
-    posts = {
-      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
-      b: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
-      c: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
-    };
-    pools = {
-      a: { fee: 100, result: { votePasses: true, quorumMet: true } },
-      b: { fee: 100, result: { votePasses: true, quorumMet: true } },
-      c: { fee: 100, result: { votePasses: true, quorumMet: true } },
-    };
-
-    const authors = await computeAuthorWeights(['a', 'b', 'c']);
-    assert.deepEqual(authors, [{ authorAddress: '0xa1', weightPPM: 1000000 }]);
-  });
-});
@@ -0,0 +1,59 @@
+const Promise = require('bluebird');
+const read = require('../../../util/forum/read');
+const { matrixPools } = require('../../../util/db');
+
+const WEIGHT_TO_REFERENCES = 300000;
+
+const computeBatchPost = async (batchItems_) => {
+  const weights = {};
+  let references = [];
+  await Promise.each(batchItems_, async (postId) => {
+    const post = await read(postId);
+    const matrixPool = await matrixPools.get(postId);
+    const { fee, result: { votePasses, quorumMet } } = matrixPool;
+    if (votePasses && quorumMet) {
+      post.authors.forEach(({ authorAddress, weightPPM }) => {
+        weights[authorAddress] = weights[authorAddress] ?? 0;
+        // scale by matrix pool fee
+        weights[authorAddress] += weightPPM * fee;
+      });
+      post.references?.forEach(({ targetPostId, weightPPM }) => {
+        // scale by matrix pool fee
+        references.push({
+          targetPostId,
+          weightPPM: weightPPM * fee,
+        });
+      });
+    }
+    // TODO: Rewards for policing
+  });
+
+  // Rescale author weights so they sum to 1000000
+  const sumOfWeights = Object.values(weights).reduce((t, v) => t + v, 0);
+  if (!sumOfWeights) {
+    return { authors: [], references: [] };
+  }
+  const scaledWeights = Object.values(weights)
+    .map((weight) => Math.floor((weight * 1000000) / sumOfWeights));
+  const sumOfScaledWeights = scaledWeights.reduce((t, v) => t + v, 0);
+  scaledWeights[0] += 1000000 - sumOfScaledWeights;
+  const authors = Object.keys(weights)
+    .map((authorAddress, i) => ({ authorAddress, weightPPM: scaledWeights[i] }));
+
+  // Rescale reference weights so they sum to WEIGHT_TO_REFERENCES
+  if (references.length) {
+    const sumOfReferenceWeights = references.reduce((t, { weightPPM }) => t + weightPPM, 0);
+    const scaledReferences = references.map((reference) => ({
+      targetPostId: reference.targetPostId,
+      weightPPM: Math.floor((reference.weightPPM * WEIGHT_TO_REFERENCES) / sumOfReferenceWeights),
+    }));
+    const sumOfScaledReferenceWeights = scaledReferences
+      .reduce((t, { weightPPM }) => t + weightPPM, 0);
+    scaledReferences[0].weightPPM += WEIGHT_TO_REFERENCES - sumOfScaledReferenceWeights;
+    references = scaledReferences;
+  }
+
+  return { authors, references };
+};
+
+module.exports = computeBatchPost;
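Both rescaling steps above use the same technique: scale each raw weight to an integer share of a fixed PPM target with Math.floor, then add the rounding remainder to the first entry so the shares sum exactly to the target. A minimal standalone sketch of that idea (illustrative only, not part of the commit; scaleToTarget is a made-up helper name):

// Sketch of the floor-plus-remainder rescaling used by computeBatchPost (not part of the commit).
const scaleToTarget = (rawWeights, target) => {
  const sum = rawWeights.reduce((t, v) => t + v, 0);
  if (!sum) return [];
  const scaled = rawWeights.map((w) => Math.floor((w * target) / sum));
  const scaledSum = scaled.reduce((t, v) => t + v, 0);
  scaled[0] += target - scaledSum; // rounding remainder goes to the first entry
  return scaled;
};

// Example: raw author weights from fees 100 and 200 scale to [333334, 666666],
// matching the 'different fees' test below.
console.log(scaleToTarget([100000000, 200000000], 1000000));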
@@ -0,0 +1,128 @@
+// const { expect } = require('chai');
+const assert = require('assert');
+const proxyquire = require('proxyquire');
+
+let posts = {};
+let pools = {};
+const read = (postId) => posts[postId];
+const matrixPools = {
+  get: (postId) => pools[postId],
+};
+
+const computeBatchPost = proxyquire('./compute-batch-post', {
+  '../../../util/forum/read': read,
+  '../../../util/db': { matrixPools },
+});
+
+describe('computeBatchPost', () => {
+  it('multiple posts by one author', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      b: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      c: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+    };
+    pools = {
+      a: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      b: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      c: { fee: 100, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['a', 'b', 'c']);
+    assert.deepEqual(authors, [{ authorAddress: '0xa1', weightPPM: 1000000 }]);
+    assert.deepEqual(references, []);
+  });
+
+  it('posts by different authors', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }] },
+    };
+    pools = {
+      a: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      b: { fee: 100, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['a', 'b']);
+    assert.deepEqual(authors, [
+      { authorAddress: '0xa1', weightPPM: 500000 },
+      { authorAddress: '0xa2', weightPPM: 500000 },
+    ]);
+    assert.deepEqual(references, []);
+  });
+
+  it('posts by different authors and pools with different fees', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }] },
+    };
+    pools = {
+      a: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      b: { fee: 200, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['a', 'b']);
+    assert.deepEqual(authors, [
+      { authorAddress: '0xa1', weightPPM: 333334 },
+      { authorAddress: '0xa2', weightPPM: 666666 },
+    ]);
+    assert.deepEqual(references, []);
+  });
+
+  it('posts with multiple authors', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 500000 }, { authorAddress: '0xa2', weightPPM: 500000 }] },
+      b: { authors: [{ authorAddress: '0xa1', weightPPM: 500000 }, { authorAddress: '0xa3', weightPPM: 500000 }] },
+    };
+    pools = {
+      a: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      b: { fee: 100, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['a', 'b']);
+    assert.deepEqual(authors, [
+      { authorAddress: '0xa1', weightPPM: 500000 },
+      { authorAddress: '0xa2', weightPPM: 250000 },
+      { authorAddress: '0xa3', weightPPM: 250000 },
+    ]);
+    assert.deepEqual(references, []);
+  });
+
+  it('post with references', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }], references: [{ targetPostId: 'a', weightPPM: 500000 }] },
+    };
+    pools = {
+      b: { fee: 100, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['b']);
+    assert.deepEqual(authors, [
+      { authorAddress: '0xa2', weightPPM: 1000000 },
+    ]);
+    assert.deepEqual(references, [{ targetPostId: 'a', weightPPM: 300000 }]);
+  });
+
+  it('post with references and pools with different fees', async () => {
+    posts = {
+      a: { authors: [{ authorAddress: '0xa1', weightPPM: 1000000 }] },
+      b: { authors: [{ authorAddress: '0xa2', weightPPM: 1000000 }] },
+      c: { authors: [{ authorAddress: '0xa3', weightPPM: 1000000 }], references: [{ targetPostId: 'a', weightPPM: 500000 }] },
+      d: { authors: [{ authorAddress: '0xa4', weightPPM: 1000000 }], references: [{ targetPostId: 'b', weightPPM: 500000 }] },
+    };
+    pools = {
+      c: { fee: 100, result: { votePasses: true, quorumMet: true } },
+      d: { fee: 200, result: { votePasses: true, quorumMet: true } },
+    };
+
+    const { authors, references } = await computeBatchPost(['c', 'd']);
+    assert.deepEqual(authors, [
+      { authorAddress: '0xa3', weightPPM: 333334 },
+      { authorAddress: '0xa4', weightPPM: 666666 },
+    ]);
+    assert.deepEqual(references, [
+      { targetPostId: 'a', weightPPM: 100000 },
+      { targetPostId: 'b', weightPPM: 200000 },
+    ]);
+  });
+});
@@ -4,7 +4,7 @@ const write = require('../../../util/forum/write');
 const addPostWithRetry = require('../../../util/add-post-with-retry');
 const callWithRetry = require('../../../util/call-with-retry');
 const { getBatchItems, clearBatchItems } = require('./batch-items');
-const computeAuthorWeights = require('./compute-author-weights');
+const computeBatchPost = require('./compute-batch-post');
 const { wallet, rollup } = require('../../../util/contracts');
 const { sendMatrixEvent } = require('../../../matrix-bot');
 const { stakeRollupAvailability } = require('../utils');
@@ -29,12 +29,10 @@ const submitRollup = async () => {
   if (!batchItems.length) {
     return { batchItems: [] };
   }
-  const authors = await computeAuthorWeights(batchItems);
+  const { authors, references } = await computeBatchPost(batchItems);
   if (!authors.length) {
     return { batchItems: [] };
   }
-  // TODO: Compute references as aggregate of the references of posts in the batch
-  const references = [];
   const content = `Batch of ${batchItems.length} items`;
   const embeddedData = {
     batchItems,
@@ -58,7 +56,9 @@ const submitRollup = async () => {
   const poolDuration = 60;
   await callWithRetry(() => rollup.submitBatch(batchPostId, batchItems, poolDuration));
   // Send matrix event
-  await sendMatrixEvent('io.dgov.rollup.submit', { batchPostId, batchItems, authors });
+  await sendMatrixEvent('io.dgov.rollup.submit', {
+    batchPostId, batchItems, authors, references,
+  });
   // Clear the batch in preparation for next batch
   await clearBatchItems(batchItems);
   return {
@@ -10,13 +10,14 @@ const read = require('../../util/forum/read');
 const { availabilityStakeDuration } = require('./config');
 const {
   stakeRollupAvailability, authorsMatch, validatePost,
+  referencesMatch,
 } = require('./utils');
 const computeMatrixPoolResult = require('./matrix-pools/compute-result');
 const { initializeBatchItems, addBatchItem, clearBatchItems } = require('./batch/batch-items');
 const { getCurrentBatchWorker, initializeBatchWorker } = require('./batch/batch-worker');
 const initiateMatrixPool = require('./matrix-pools/initiate');
 const { initiateMatrixPools } = require('./matrix-pools/initiate-matrix-pools');
-const computeAuthorWeights = require('./batch/compute-author-weights');
+const computeBatchPost = require('./batch/compute-batch-post');
 
 const start = async () => {
   console.log('registering validation pool decider for rollup');
@@ -30,16 +31,16 @@ const start = async () => {
     if (!post.embeddedData?.batchItems) return false;
 
     // Our task here is to check whether the posted result agrees with our own computations
-    let expectedAuthors;
     try {
-      expectedAuthors = await computeAuthorWeights(post.embeddedData.batchItems);
+      const { authors, references } = await computeBatchPost(post.embeddedData.batchItems);
+      const valid = await authorsMatch(post.authors, authors)
+        && await referencesMatch(post.references, references);
+      console.log(`batch post ${pool.props.postId} is ${valid ? 'valid' : 'invalid'}`);
+      return valid;
     } catch (e) {
       console.error('Error calculating batch post author weights', e);
       return null;
     }
-    const valid = authorsMatch(post.authors, expectedAuthors);
-    console.log(`batch post ${pool.props.postId} is ${valid ? 'valid' : 'invalid'}`);
-    return valid;
   });
 
   // Even if we're not the current batch worker, keep track of batch items
@@ -165,10 +166,13 @@ const start = async () => {
       }
       case 'io.dgov.rollup.submit': {
        // This should include the identifier of the on-chain validation pool
-        const { batchPostId, batchItems, authors } = event.content;
+        const {
+          batchPostId, batchItems, authors, references,
+        } = event.content;
         // Compare batch worker's result with ours to verify
-        const expectedAuthors = await computeAuthorWeights(batchItems);
-        if (!authorsMatch(authors, expectedAuthors)) {
+        const { authors: expectedAuthors, references: expectedReferences } = await computeBatchPost(batchItems);
+        if (!await authorsMatch(authors, expectedAuthors)
+          || !await referencesMatch(references, expectedReferences)) {
           sendMatrixText(`Unexpected result for batch post ${batchPostId}`);
         }
         // Reset batchItems in preparation for next batch
@@ -24,6 +24,14 @@ const authorsMatch = async (authors, expectedAuthors) => {
   });
 };
 
+const referencesMatch = async (references, expectedReferences) => {
+  if (expectedReferences.length !== references.length) return false;
+  return references.every(({ targetPostId, weightPPM }) => {
+    const expectedReference = expectedReferences.find((x) => x.targetPostId === targetPostId);
+    return weightPPM === expectedReference.weightPPM;
+  });
+};
+
 const validateWorkEvidence = async (sender, post) => {
   let valid = false;
   if (sender === work2.target) {
@@ -44,6 +52,7 @@ const validatePost = async (sender, post) => {
 module.exports = {
   stakeRollupAvailability,
   authorsMatch,
+  referencesMatch,
   validateWorkEvidence,
   validatePost,
 };
@@ -266,7 +266,7 @@ Rather than submit every Post on-chain and conduct every Validation Pool on-chai
 
 With this Rollup Post, we have the opportunity to attribute credit to multiple authors, with a weight assigned to each author.
 
-The Rollup Post can weight authorship in accordance with the off-chain Validation Pools that have taken place. The off-chain system can fully model the Forum and Bench outlined in the [Requirements](./requirements.md) section. For demonstration purposes, our prototype makes some simplifying assumptions. Work Evidence Posts (WEV) are assumed to contain no references to prior Posts. In reality, we want WEV to be able to reference prior Posts, such as those representing policies of the DAO, prior work by other DAO members, prior art outside the DAO, and so on. So, a proper implementation of this system should account for these references.
+The Rollup Post should weight authorship in accordance with the off-chain Validation Pools that have taken place.
 
 To achieve the Rollup requirements, the contract must do the following:
 
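The documentation change above drops the earlier simplifying assumption that Work Evidence Posts contain no references; with this commit the Rollup Post also carries aggregated reference weights. A small worked example of that aggregation, with values mirroring the 'post with references and pools with different fees' test (illustrative only, not part of the commit):

// How reference weights land on the Rollup Post (illustrative; values mirror the tests above).
const WEIGHT_TO_REFERENCES = 300000; // total PPM allocated to referenced posts
const rawReferences = [
  { targetPostId: 'a', weightPPM: 500000 * 100 }, // referenced by post c, pool fee 100
  { targetPostId: 'b', weightPPM: 500000 * 200 }, // referenced by post d, pool fee 200
];
const total = rawReferences.reduce((t, { weightPPM }) => t + weightPPM, 0);
const scaled = rawReferences.map(({ targetPostId, weightPPM }) => ({
  targetPostId,
  weightPPM: Math.floor((weightPPM * WEIGHT_TO_REFERENCES) / total),
}));
// scaled: [{ targetPostId: 'a', weightPPM: 100000 }, { targetPostId: 'b', weightPPM: 200000 }]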