Added a calculation for elasticity of bitcoin transaction fees

Vitalik Buterin 2017-06-05 07:20:10 -04:00
parent 2ce0859ef5
commit c4f945077b
7 changed files with 91 additions and 18 deletions


@ -106,7 +106,11 @@ The commit-following part of this rule can be viewed in some ways as mirroring t
### Adding Dynamic Validator Sets
The above assumes that there is a single set of validators that never changes. In reality, however, we want validators to be able to join and leave.
The above assumes that there is a single set of validators that never changes. In reality, however, we want validators to be able to join and leave. This introduces two classes of considerations. First, all of the above math assumes that the validator set that prepares and commits any two given checkpoints is identical. If this is not the case, then there may be situations where two conflicting checkpoints get finalized, but no one can be slashed because they were finalized by two completely different validator sets. Hence, we need to think carefully about how validator set transitions happen and how one validator set "passes the baton" to the next.
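A toy illustration of the failure mode (assuming, as a simplification, a slashing rule that can only punish a validator for contradictory votes it signed itself; `slashable_validators` is an invented helper, not part of the spec):

# If two conflicting checkpoints are finalized by completely disjoint validator sets,
# the set of validators who signed both (and are therefore accountable) is empty.
def slashable_validators(signers_of_checkpoint_a, signers_of_checkpoint_b):
    return signers_of_checkpoint_a & signers_of_checkpoint_b

print(slashable_validators({'v1', 'v2', 'v3'}, {'v4', 'v5', 'v6'}))  # set() -> no one can be slashed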
<copy text here>
Second, we need to establish the specific mechanism by which validators can join and leave. We start off with a simple one: validators apply to join the validator set by sending a transaction containing (i) the ETH they want to deposit, (ii) the "validation code" (a kind of generalized public key), and (iii) the return address that their deposit will be sent to when they withdraw. If this transaction gets included during dynasty N, then they become part of dynasty N + 2, as well as all future dynasties until they decide to log off. The two-dynasty delay ensures that the joining transaction will be confirmed by the time dynasty N + 1 begins, and so any candidate block that initiates dynasty N + 2 is guaranteed to have the same validator set for dynasty N + 2. Leaving the validator set is symmetrical: validators can send a transaction to log off in dynasty N, which takes effect in dynasty N + 2.
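A minimal sketch of this two-dynasty scheduling (illustrative Python; the class and field names such as `start_dynasty`/`end_dynasty` are invented for clarity and the actual contract logic may differ):

# Deposits and logoffs included during dynasty N only take effect at dynasty N + 2.
class DynastySchedule:
    def __init__(self):
        self.start_dynasty = {}   # validator -> first dynasty in which it is active
        self.end_dynasty = {}     # validator -> first dynasty in which it is no longer active

    def deposit(self, validator, current_dynasty):
        # join transaction included during dynasty N -> active from dynasty N + 2 onward
        self.start_dynasty[validator] = current_dynasty + 2

    def logout(self, validator, current_dynasty):
        # logoff transaction included during dynasty N -> inactive from dynasty N + 2 onward
        self.end_dynasty[validator] = current_dynasty + 2

    def is_active(self, validator, dynasty):
        start = self.start_dynasty.get(validator)
        end = self.end_dynasty.get(validator)
        return start is not None and start <= dynasty and (end is None or dynasty < end)

s = DynastySchedule()
s.deposit('validator_1', current_dynasty=5)
print(s.is_active('validator_1', 6), s.is_active('validator_1', 7))  # False True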
### Economic Fundamentals

36
elasticity/analyzer.py Normal file

@ -0,0 +1,36 @@
# Each non-empty line of data.csv is "block_size,avg_fee"
lines = open('data.csv').read().split('\n')
data = [(int(x[:x.find(',')]), float(x[x.find(',')+1:])) for x in lines if x]
REPORT_THRESHOLD = 0.23

def get_error(scale, elasticity, growth):
    # Sum of squared relative errors between the exponential trend scale * (1 + growth) ** i
    # and the observed index block_size ** (1/(1+e)) * avg_fee ** (e/(1+e))
    err = 0
    bs_fac, fee_fac = 1 / (1 + elasticity), elasticity / (1 + elasticity)
    for i, (block_size, avg_fee) in enumerate(data):
        expected = scale * (1 + growth) ** i
        actual = block_size ** bs_fac * avg_fee ** fee_fac
        # if i >= len(data) - 6:
        #     err += ((expected / actual - 1) ** 2) * 2
        err += (expected / actual - 1) ** 2
    return err

# Grid search over scale, elasticity and growth; print every combination whose error
# falls below the reporting threshold and keep track of the best fit
best = (0, 0, 0, 9999999999999999999999999.0)
for scale in [1 * 1.05 ** x for x in range(300)]:
    for elasticity in [x * 0.025 for x in range(120)]:
        for growth in [x * 0.001 for x in range(120)]:
            err = get_error(scale, elasticity, growth)
            if err <= REPORT_THRESHOLD:
                print('%d %.3f %.3f: %.3f' % (scale, elasticity, growth, err))
            if err < best[-1]:
                best = scale, elasticity, growth, err
print('Best params: %d %.3f %.3f (err %.3f)' % best)

# Show fitted vs. actual values period by period for the best parameters
scale, elasticity, growth, err = best
bs_fac, fee_fac = 1 / (1 + elasticity), elasticity / (1 + elasticity)
for i, (block_size, avg_fee) in enumerate(data):
    expected = scale * (1 + growth) ** i
    actual = block_size ** bs_fac * avg_fee ** fee_fac
    print(i, actual, expected)
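A possible follow-on usage sketch (not part of the commit; `implied_fee` is a hypothetical helper and assumes the best-fit `scale`, `elasticity`, `growth` and `data` from analyzer.py above): since the fit treats block_size ** (1/(1+e)) * avg_fee ** (e/(1+e)) as an index growing by (1 + growth) per period, it can be inverted to back out the average fee the fit implies for a given block size.

# Hypothetical helper; requires elasticity > 0.
def implied_fee(block_size, period):
    target = scale * (1 + growth) ** period  # fitted combined index at that period
    # invert block_size ** (1/(1+e)) * fee ** (e/(1+e)) = target for fee
    return target ** ((1 + elasticity) / elasticity) / block_size ** (1 / elasticity)

# e.g. the fee the fit would imply for a 1,000,000-byte block one period past the data
print(implied_fee(1000000, len(data)))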

28
elasticity/data.csv Normal file

@ -0,0 +1,28 @@
358373,0.0001556
373527,0.00014486
387199,0.00014816
404860,0.00017626
392748,0.00015425
454844,0.00016187
594453,0.0002212
463351,0.00021871
555749,0.00019946
543490,0.00019368
566810,0.00017566
623697,0.00017407
662227,0.00019356
732706,0.00020016
715370,0.00022996
743867,0.00025096
769594,0.00022846
820848,0.00027225
785600,0.00027909
777843,0.00030502
749121,0.00027106
827073,0.00031194
863884,0.00032692
873975,0.00035117
864976,0.00040644
949126,0.00054382
955451,0.00080043
957912,0.00076301

BIN
elasticity/data.png Normal file

Binary file not shown (PNG, 14 KiB).


@ -1,12 +1,12 @@
import random
import datetime
diffs = [453.71 * 10**12]
hashpower = diffs[0] / 15.73
times = [1495425834]
diffs = [512.60 * 10**12]
hashpower = diffs[0] / 15.50
times = [1496227377]
for i in range(3746966, 6010000):
for i in range(3797763, 6010000):
    blocktime = random.expovariate(hashpower / diffs[-1])
    adjfac = max(1 - int(blocktime / 10), -99) / 2048.
    newdiff = diffs[-1] * (1 + adjfac)


@ -1,25 +1,30 @@
data = [[float(y) for y in x.strip().split(', ')] for x in open('block_datadump.csv').readlines()]
for i in range(0, 2283416, 200000):
    print 'Checking 200k blocks from %d' % i
    print('Checking 200k blocks from %d' % i)
    dataset = []
    totuncles, totuncreward = 0, 0
    totbs = [0 for j in range(40)]
    totus = [0 for j in range(40)]
    for num, uncs, uncrew, uncgas, txs, gas, length, zeroes in data[i:i+200000]:
        dataset.append([gas, 0])
        for i in range(int(uncs)):
            dataset.append([uncgas / uncs * 1.0, 1])
        totuncles += uncs
        totuncreward += uncrew
    print 'Average uncle reward:', totuncreward * 1.0 / totuncles
    print 'Average nephew reward:', totuncles * 5 / 32. / len(dataset)
        totus[int(gas / 100000)] += uncs
        totbs[int(gas / 100000)] += 1
    print([totus[j] * 100.0 / (totbs[j] + 0.000000001) for j in range(40)])
    print('Average uncle reward:', totuncreward * 1.0 / totuncles)
    print('Average nephew reward:', totuncles * 5 / 32. / len(dataset))
    mean_x = sum([x[0] for x in dataset]) * 1.0 / len(dataset)
    mean_y = sum([x[1] for x in dataset]) * 1.0 / len(dataset)
    print 'Average gas used:', mean_x
    print 'Average uncle rate:', mean_y
    print('Average gas used:', mean_x)
    print('Average uncle rate:', mean_y)
    covar = sum([(x[0] - mean_x) * (x[1] - mean_y) for x in dataset])
    var = sum([(x[0] - mean_x) ** 2 for x in dataset])
    print 'm = ', covar / var
    print 'b = ', mean_y - mean_x * (covar / var)
    print('m = ', covar / var)
    print('b = ', mean_y - mean_x * (covar / var))


@ -1,7 +1,7 @@
data = [[float(y) for y in x.strip().split(', ')] for x in open('block_datadump.csv').readlines()]
for i in range(0, 2283416, 200000):
    print 'Checking 200k blocks from %d' % i
    print('Checking 200k blocks from %d' % i)
    dataset = []
    for j in range(i, min(i + 200000, 2283400), 100):
        gas = 0
@ -35,8 +35,8 @@ for i in range(0, 2283416, 200000):
    var2 = sum([(x[2] - mean_x2) ** 2 for x in dataset])
    covar3 = sum([(x[3] - mean_x2) * (x[-1] - mean_y2) for x in dataset])
    var3 = sum([(x[3] - mean_x2) ** 2 for x in dataset])
    print 'Base m =', covar / var
    print 'Base b =', mean_y - mean_x * (covar / var)
    print 'Excess m for txs=', covar1 / var1
    print 'Excess m for nonzero bytes=', covar2 / var2
    print 'Excess m for zero bytes=', covar3 / var3
    print('Base m =', covar / var)
    print('Base b =', mean_y - mean_x * (covar / var))
    print('Excess m for txs=', covar1 / var1)
    print('Excess m for nonzero bytes=', covar2 / var2)
    print('Excess m for zero bytes=', covar3 / var3)