# utilitarios.py — small utility functions: array rotation and entropy measures.
import math
from collections import Counter
arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
offset = 3


# Time: O(n)
# Space: O(n) for the output list (the original comment said O(2n)).
def shiftArray(arr, offset):
    """Return a copy of *arr* rotated right (circularly) by *offset* positions.

    Args:
        arr: list to rotate (not modified).
        offset: number of positions to shift; may exceed len(arr).

    Returns:
        A new list of the same length with each element moved to
        index (i + offset) % len(arr). Empty input returns [].
    """
    shifted = [0] * len(arr)
    for i, num in enumerate(arr):
        shifted[(i + offset) % len(arr)] = num
    return shifted


print(arr)
# Bug fix: the original never called shiftArray, so it printed a list of
# zeros; actually compute the shifted array before printing it.
arr2 = shiftArray(arr, offset)
print(arr2)
def min_entropy(bit_string):
    """Return the min-entropy (in bits) of the symbol distribution in *bit_string*.

    Min-entropy is -log2(p_max), where p_max is the empirical probability of
    the most frequent symbol. It lower-bounds the Shannon entropy.

    Args:
        bit_string: non-empty sequence of symbols (e.g. a string of '0'/'1').

    Returns:
        The min-entropy as a float; 0.0 when one symbol has probability 1.

    Raises:
        ValueError: if *bit_string* is empty (the original raised an opaque
            ValueError from max() on an empty sequence).
    """
    if not bit_string:
        raise ValueError("bit_string must be non-empty")
    # Counter replaces the hand-rolled frequency loop; the file already
    # imports it at the top, and the redundant local `import math` is gone.
    counts = Counter(bit_string)
    max_prob = max(counts.values()) / len(bit_string)
    return -math.log2(max_prob)


# Example usage:
bit_string = "010010"
print(min_entropy(bit_string))  # Outputs the min-entropy
def calculate_entropy(s):
    """Return the Shannon entropy of *s* scaled by a run-based complexity factor.

    The result is shannon_entropy * (1 + runs/len(s)), where "runs" is the
    number of maximal blocks of equal adjacent characters in *s*.

    Args:
        s: string (or other sequence of hashable symbols) to analyze.

    Returns:
        The adjusted entropy as a float; 0.0 for an empty input (the
        original crashed with ZeroDivisionError on "").
    """
    if not s:
        return 0.0
    total_chars = len(s)
    # Character frequencies -> Shannon entropy in bits.
    frequency = Counter(s)
    shannon_entropy = -sum(
        (count / total_chars) * math.log2(count / total_chars)
        for count in frequency.values()
    )
    # A new run starts wherever two adjacent characters differ; +1 counts
    # the first run (idiomatic replacement for the manual index loop).
    runs = 1 + sum(a != b for a, b in zip(s, s[1:]))
    pattern_complexity = runs / total_chars
    return shannon_entropy * (1 + pattern_complexity)