# analyse_price.py (forked from JoepdeJong/special-topic-cse)
import numpy as np
import networkx as nx
from read_datasets.entropy import *
from read_datasets.hierarchy import *
import matplotlib.pyplot as plt
from models.prices_model import prices_model
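
# NOTE (editorial assumption, inferred from usage below): the wildcard imports
# from read_datasets.entropy and read_datasets.hierarchy are expected to
# provide entropy(tol, A), entropy2(A), Henrici(A) and
# spectralScalingMeasure(A); prices_model(n, m, k0, initial_nodes, p) is
# expected to return a directed Price's preferential-attachment graph in which
# each edge is reciprocated with probability p.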
n = 20  # Number of nodes
n_iterations = 5  # Number of random graphs per reciprocity value
m = 3  # Number of edges added per new node
k0 = 1  # Initial attractiveness factor
initial_nodes = 1
reciprocal_threshold = np.linspace(0, 1, 100)
# Alternatively, use a logarithmic scale with more probabilities close to 0:
# reciprocal_threshold = np.logspace(-2, 0, num=100)
tol = 1e-10  # Entropy tolerance

# Set random seed for reproducibility
# np.random.seed(0)
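
# Columns of `data` (from the assignments in the loop below): [p, avg Henrici
# (currently disabled, stays 0), avg entropy h1 (currently disabled, stays 0),
# avg entropy h2, avg entropy ratio h3].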
data = np.zeros((len(reciprocal_threshold), 5))
for i in range(len(reciprocal_threshold)):
    p = reciprocal_threshold[i]

    avg_df = 0
    avg_h1 = 0
    avg_h2 = 0
    avg_h3 = 0
    for k in range(n_iterations):
        graph = prices_model(n, m, k0, initial_nodes, p)
        A = nx.adjacency_matrix(graph)
        A = A.todense()

        # df = Henrici(A)
        # h1 = entropy(tol, A)
        h2 = entropy2(A).real
        h3 = h2 / entropy2((A + A.T) / 2).real
        spectral_scaling, spectral_gap = spectralScalingMeasure(A)
        print(spectral_scaling, spectral_gap)

        # avg_df += df
        # avg_h1 += h1
        avg_h2 += h2
        avg_h3 += h3

    # Distance number to leader node
    avg_df /= n_iterations
    # avg_h1 /= n_iterations
    avg_h2 /= n_iterations
    avg_h3 /= n_iterations

    # print(p, avg_h2)
    data[i, :] = [p, avg_df, avg_h1, avg_h2, avg_h3]
# Plot the Henrici against the reciprocal probability
print(data[:, 4])
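# Note: with `df = Henrici(A)` commented out above, avg_df stays 0, so this
# plot is flat until the Henrici computation (see the TODO below) is enabled.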
plt.figure()
x = data[:,0]
y = data[:,1]
plt.plot(x, y, 'o')
plt.xlabel('Reciprocal probability')
plt.ylabel('Henrici departure from normality')
plt.figure()
# Plot the entropy against the reciprocal probability
x = data[:,0]
y = data[:,3]
plt.plot(x, y, 'o')
plt.xlabel('Reciprocal probability')
plt.ylabel('Entropy')
plt.figure()
# Plot the entropy against the reciprocal probability
x = data[:,0]
y = data[:,3]
plt.plot(x, y, 'o')
# Plot the x-axis on a log scale
plt.xscale('log')
plt.xlabel('Reciprocal probability')
plt.ylabel('Entropy')
plt.figure()
# Plot the entropy ratio against the reciprocal probability
x = data[:,0]
y = data[:,4]
plt.plot(x, y, 'o')
# Plot the x-axis on a log scale
plt.xscale('log')
plt.xlabel('Reciprocal probability')
plt.ylabel('Entropy ratio')
plt.show()
# TODO: make henrici thing
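
# A minimal sketch of the Henrici departure from normality, as an assumption
# about what the disabled `Henrici(A)` call above is meant to compute; the
# project's own implementation in the read_datasets modules may differ.
def henrici_sketch(A):
    """Henrici departure from normality: sqrt(||A||_F^2 - sum_i |lambda_i|^2).

    Zero exactly when A is normal; grows with the non-normality that
    unreciprocated directed edges introduce.
    """
    A = np.asarray(A, dtype=float)
    eigenvalues = np.linalg.eigvals(A)
    # Clip at 0 to guard against tiny negative values from round-off.
    return np.sqrt(max(np.linalg.norm(A, 'fro') ** 2
                       - np.sum(np.abs(eigenvalues) ** 2), 0.0))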