import random
import math


"""
1: Logarithmic Weighted Random Selector
"""
def lwrs(elements: list):
    """
    Logarithmic Weighted Random Selector:

    Takes a list of elements and, using a logarithmic scale,
    randomly selects one member of the list, giving a higher
    probability weight to the initial elements of the list.

    This algorithm selects the initial members of the list
    more frequently, following the shape of the
    logarithmic/exponential curve.

    Parameters:
    - elements [list or iter]: List of elements in order of importance.
    Returns:
    - Random element selected from the input list.
    """

    #* Validation cases:
    if not hasattr(elements, "__iter__"):
        raise TypeError(f'"elements" has to be a list or iterable, but {type(elements)} was given')

    #* Amount of elements: "n":
    n = len(elements)

    if n == 0:
        raise ValueError('"elements" is an empty list.')
    if n == 1:
        return elements[0]

    #* Draw a random float number based on the n elements:
    #? To operate on the numbers, it considers 1 as the
    #? first element; then, when it selects an index of the
    #? list, it converts back to 0-based indexing:
    #/ Random float between 0 and n with 4 decimals.
    #* This random value represents a random point of selection
    #* on a linear axis in which all elements have the same
    #* space to be selected:
    random_point = round(random.uniform(0, n), 4)
    # print(f"\nLinear: {random_point}; ", end="")

    """
    This is the purpose:

    * On a linear scale between 1 - n, all the spaces
      between elements have the same size.

    * On a logarithmic scale between 1 - n, the spaces
      between elements are different; the first element has
      the biggest space, the second is smaller, etc. The
      differences between the spaces are based on logarithms.

    * When the random point is drawn, you get the linear-scale
      element; then its space is weighted against the
      logarithmic scale to select the equivalent element.

    Example:
        NOTE: E1, E2, ..., E6 are the 6 elements of the list.

        LINEAR SCALE:
            E1      E2      E3      E4      E5      E6
        0       1       2       3       4       5       6
        |-------|-------|-------|-------|-------|-------|

        RANDOM POINT:
                                  *

        LOGARITHMIC SCALE:
                E1            E2        E3    E4   E5  E6
        0                1           2       3    4   5 6
        |----------------|-----------|-------|----|---|-|

        SELECTION:

        * Linear: 3.24: [E4];
        * Logarithmic: 3: [E2];

    """
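    #? Worked example (illustrative numbers, assuming n = 6 as in the
    #? diagram above): the log-scale boundaries n * log_(n+1)(i + 1) are
    #? approximately 2.14, 3.39, 4.27, 4.96, 5.52 and 6.0 for i = 1..6,
    #? so a linear random_point of 3.24 falls in the second interval
    #? and E2 is returned, matching the SELECTION example.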

    #?// If the random point is 1, the first element is taken by default:
    # if random_point == 1:
    #     return 0

    #? The formula to calculate the distance between the start of
    #? the scale and a random linear point is:
    #/     logPoint = n * log_(n+1)(linearPoint)
    #/ The linearPoint has to be >= 2 and <= n+1:
    #* So it starts a loop to check whether the random linear point
    #* falls within the space of the first log point, the second,
    #* the third, etc.
    for i in range(1, n + 1):
        if random_point <= n * math.log(i + 1, n + 1):
            return elements[i - 1]


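#? Illustration (hypothetical helper, not part of the original selectors):
#? under lwrs, element i (1-based) is chosen with probability
#? log_(n+1)(i + 1) - log_(n+1)(i), since the bucket boundaries are
#? n * log_(n+1)(i + 1) on an axis of length n. A minimal sketch to
#? inspect that implied distribution:
def lwrs_probabilities(n: int) -> list:
    """Hypothetical helper: exact selection probability of each of n positions under lwrs."""
    return [math.log(i + 1, n + 1) - math.log(i, n + 1) for i in range(1, n + 1)]

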
"""
2: Exponential Weighted Random Selection
"""
def ewrs(lista, base=2):
    """
    Exponential Weighted Random Selection:

    Selects one element from "lista", giving each position an
    exponentially decaying weight (highest for the first element)
    and sampling from the normalized distribution.
    """
    #? Weights calculation:
    weights = [base ** i for i in range(len(lista))]

    #? Order inversion of the weights:
    weights = weights[::-1]
    total_weights = sum(weights)

    #? Normalization to 1:
    probabilities = [weight / total_weights for weight in weights]

    #? Selection loop:
    random_point = random.uniform(0, 1)
    cumulative = 0
    for i, probability in enumerate(probabilities):
        cumulative += probability
        if cumulative >= random_point:
            return lista[i]


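#? Worked example (illustrative numbers only): with 4 elements and the
#? default base=2, the raw weights are [1, 2, 4, 8]; after the reversal
#? they become [8, 4, 2, 1], so the normalized probabilities are
#? [8/15, 4/15, 2/15, 1/15] and the first element is chosen a little
#? over half of the time.

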
"""
3: Complexity Weighted Random Selector
"""
def cwrs(elements: list, complexity: float):
    """
    Select one element from the list "elements" based on a bias determined by the complexity value.
    The weight is given by a power-law decay:
    - w(i) = 1 / (i+1)^s
    where:
    - s = s_max * (1 - complexity/100)
    - s_max = (n / (n*(complexity/100))) * ((complexity/100)+1)
    - complexity is between 0 and 100

    For complexity=0, s = n (maximum steepness) and the first element is nearly always selected.
    For complexity=100, s = 0 and all elements have weight 1 (uniform selection).
    """
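    #? Worked example (illustrative numbers only): for n = 10 and
    #? complexity = 50, s_max = (10 / (10 * 0.5)) * (0.5 + 1) = 3.0 and
    #? s = 3.0 * (1 - 0.5) = 1.5, so the weights fall off as 1 / (i+1)^1.5.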
    n = len(elements)
    if complexity < 0 or complexity > 100:
        raise ValueError('"complexity" must be between 0 and 100')

    if complexity == 0:
        s_max = n
    else:
        s_max = (n / (n * (complexity / 100))) * ((complexity / 100) + 1)

    s = s_max * (1 - complexity / 100.0)

    # Compute weights for each element using the power law:
    weights = [1 / ((i + 1) ** s) for i in range(n)]

    # Normalize the weights to get a probability distribution:
    total = sum(weights)
    probabilities = [w / total for w in weights]

    return random.choices(elements, weights=probabilities, k=1)[0]
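

#? Minimal usage sketch (hypothetical demo, not part of the selectors):
#? draws a few thousand samples from each selector over the same list
#? and prints how often each element was picked, to make the bias visible.
if __name__ == "__main__":
    from collections import Counter

    items = ["E1", "E2", "E3", "E4", "E5", "E6"]
    trials = 10_000

    for name, pick in (
        ("lwrs", lambda: lwrs(items)),
        ("ewrs", lambda: ewrs(items)),
        ("cwrs", lambda: cwrs(items, complexity=50)),
    ):
        counts = Counter(pick() for _ in range(trials))
        print(name, {item: counts.get(item, 0) / trials for item in items})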