I've just discovered the hashpspool setting and found that it is set to false on all of my pools.
I can't really work out what this setting does though.

On 11 November 2015 at 14:51, James Eckersall <james.eckersall@xxxxxxxxx> wrote:
Hi,

I have a Ceph cluster running on 0.80.10 and I'm having problems with the data balancing on two new nodes that were recently added.

The cluster nodes look as follows: 6x OSD servers with 32 4TB SAS drives. The drives are configured with RAID0 in pairs, so 16 8TB OSD's per node.

The servers are organised into racks:
RACK-1 - OSD servers 1 and 5
RACK-2 - OSD servers 3 and 6
RACK-3 - OSD servers 2 and 4
RACK-4 - OSD servers 7 and 8 (the new servers)

We keep 3 replicas of all data and have 5 pools. The crush map is fairly standard and I've included it at the bottom of this mail.

The issue I'm having is the distribution of data. The two new OSD nodes (07 and 08) are completely stuffed full of data, despite having adjusted (reweight-by-utilization) the osd weights to eventually less than 0.1.

New servers recently added:
2x OSD servers with 20 4TB SAS drives. These are configured as 20 individual OSD's per node.
I've included the "ceph osd tree" output at the bottom of the mail too.

I'm sure this is related to how it's picking the racks, but can't work out what I should change the map to, to make it more even. I'd appreciate any advice on what is going on here and how to resolve it.

Thanks

J
CRUSH MAP:
# begin crush map
tunable choose_local_tries 0
tunable choose_local_fallback_tries 0
tunable choose_total_tries 50
tunable chooseleaf_descend_once 1
tunable chooseleaf_vary_r 1
tunable straw_calc_version 1
# devices
device 0 osd.0
device 1 osd.1
device 2 osd.2
device 3 osd.3
device 4 osd.4
device 5 osd.5
device 6 osd.6
device 7 osd.7
device 8 osd.8
device 9 osd.9
device 10 osd.10
device 11 osd.11
device 12 osd.12
device 13 osd.13
device 14 osd.14
device 15 osd.15
device 16 osd.16
device 17 osd.17
device 18 osd.18
device 19 osd.19
device 20 osd.20
device 21 osd.21
device 22 osd.22
device 23 osd.23
device 24 osd.24
device 25 osd.25
device 26 osd.26
device 27 osd.27
device 28 osd.28
device 29 osd.29
device 30 osd.30
device 31 osd.31
device 32 osd.32
device 33 osd.33
device 34 osd.34
device 35 osd.35
device 36 osd.36
device 37 osd.37
device 38 osd.38
device 39 osd.39
device 40 osd.40
device 41 osd.41
device 42 osd.42
device 43 osd.43
device 44 osd.44
device 45 osd.45
device 46 osd.46
device 47 osd.47
device 48 osd.48
device 49 osd.49
device 50 osd.50
device 51 osd.51
device 52 osd.52
device 53 osd.53
device 54 osd.54
device 55 osd.55
device 56 osd.56
device 57 osd.57
device 58 osd.58
device 59 osd.59
device 60 osd.60
device 61 osd.61
device 62 osd.62
device 63 osd.63
device 64 osd.64
device 65 osd.65
device 66 osd.66
device 67 osd.67
device 68 osd.68
device 69 osd.69
device 70 osd.70
device 71 osd.71
device 72 osd.72
device 73 osd.73
device 74 osd.74
device 75 osd.75
device 76 osd.76
device 77 osd.77
device 78 osd.78
device 79 osd.79
device 80 osd.80
device 81 osd.81
device 82 osd.82
device 83 osd.83
device 84 osd.84
device 85 osd.85
device 86 osd.86
device 87 osd.87
device 88 osd.88
device 89 osd.89
device 90 osd.90
device 91 osd.91
device 92 osd.92
device 93 osd.93
device 94 osd.94
device 95 osd.95
device 96 osd.96
device 97 osd.97
device 98 osd.98
device 99 osd.99
device 100 osd.100
device 101 osd.101
device 102 osd.102
device 103 osd.103
device 104 osd.104
device 105 osd.105
device 106 osd.106
device 107 osd.107
device 108 osd.108
device 109 osd.109
device 110 osd.110
device 111 osd.111
device 112 osd.112
device 113 osd.113
device 114 osd.114
device 115 osd.115
device 116 osd.116
device 117 osd.117
device 118 osd.118
device 119 osd.119
device 120 osd.120
device 121 osd.121
device 122 osd.122
device 123 osd.123
device 124 osd.124
device 125 osd.125
device 126 osd.126
device 127 osd.127
device 128 osd.128
device 129 osd.129
device 130 osd.130
device 131 osd.131
device 132 osd.132
device 133 osd.133
device 134 osd.134
device 135 osd.135
# types
type 0 osd
type 1 host
type 2 rack
type 3 row
type 4 room
type 5 datacenter
type 6 root
# buckets
host OSD-01 {
id -2 # do not change unnecessarily
# weight 116.320
alg straw
hash 0 # rjenkins1
item osd.0 weight 7.270
item osd.1 weight 7.270
item osd.3 weight 7.270
item osd.5 weight 7.270
item osd.6 weight 7.270
item osd.7 weight 7.270
item osd.8 weight 7.270
item osd.10 weight 7.270
item osd.11 weight 7.270
item osd.12 weight 7.270
item osd.13 weight 7.270
item osd.15 weight 7.270
item osd.16 weight 7.270
item osd.17 weight 7.270
item osd.18 weight 7.270
item osd.2 weight 7.270
}
host OSD-05 {
id -6 # do not change unnecessarily
# weight 116.320
alg straw
hash 0 # rjenkins1
item osd.80 weight 7.270
item osd.82 weight 7.270
item osd.83 weight 7.270
item osd.92 weight 7.270
item osd.93 weight 7.270
item osd.95 weight 7.270
item osd.84 weight 7.270
item osd.85 weight 7.270
item osd.86 weight 7.270
item osd.88 weight 7.270
item osd.89 weight 7.270
item osd.90 weight 7.270
item osd.91 weight 7.270
item osd.63 weight 7.270
item osd.62 weight 7.270
item osd.65 weight 7.270
}
rack RACK-1 {
id -8 # do not change unnecessarily
# weight 232.640
alg straw
hash 0 # rjenkins1
item OSD-01 weight 116.320
item OSD-05 weight 116.320
}
host OSD-03 {
id -4 # do not change unnecessarily
# weight 116.320
alg straw
hash 0 # rjenkins1
item osd.4 weight 7.270
item osd.40 weight 7.270
item osd.41 weight 7.270
item osd.42 weight 7.270
item osd.28 weight 7.270
item osd.29 weight 7.270
item osd.43 weight 7.270
item osd.44 weight 7.270
item osd.33 weight 7.270
item osd.34 weight 7.270
item osd.49 weight 7.270
item osd.50 weight 7.270
item osd.38 weight 7.270
item osd.39 weight 7.270
item osd.55 weight 7.270
item osd.56 weight 7.270
}
host OSD-06 {
id -7 # do not change unnecessarily
# weight 116.320
alg straw
hash 0 # rjenkins1
item osd.96 weight 7.270
item osd.97 weight 7.270
item osd.98 weight 7.270
item osd.99 weight 7.270
item osd.100 weight 7.270
item osd.101 weight 7.270
item osd.102 weight 7.270
item osd.103 weight 7.270
item osd.107 weight 7.270
item osd.108 weight 7.270
item osd.110 weight 7.270
item osd.111 weight 7.270
item osd.104 weight 7.270
item osd.105 weight 7.270
item osd.106 weight 7.270
item osd.61 weight 7.270
}
rack RACK-2 {
id -9 # do not change unnecessarily
# weight 232.640
alg straw
hash 0 # rjenkins1
item OSD-03 weight 116.320
item OSD-06 weight 116.320
}
host OSD-02 {
id -3 # do not change unnecessarily
# weight 116.320
alg straw
hash 0 # rjenkins1
item osd.20 weight 7.270
item osd.21 weight 7.270
item osd.22 weight 7.270
item osd.23 weight 7.270
item osd.9 weight 7.270
item osd.25 weight 7.270
item osd.26 weight 7.270
item osd.27 weight 7.270
item osd.14 weight 7.270
item osd.30 weight 7.270
item osd.31 weight 7.270
item osd.32 weight 7.270
item osd.19 weight 7.270
item osd.35 weight 7.270
item osd.36 weight 7.270
item osd.37 weight 7.270
}
host OSD-04 {
id -5 # do not change unnecessarily
# weight 116.320
alg straw
hash 0 # rjenkins1
item osd.24 weight 7.270
item osd.64 weight 7.270
item osd.72 weight 7.270
item osd.77 weight 7.270
item osd.45 weight 7.270
item osd.46 weight 7.270
item osd.47 weight 7.270
item osd.48 weight 7.270
item osd.51 weight 7.270
item osd.52 weight 7.270
item osd.53 weight 7.270
item osd.54 weight 7.270
item osd.57 weight 7.270
item osd.58 weight 7.270
item osd.59 weight 7.270
item osd.60 weight 7.270
}
rack RACK-3 {
id -10 # do not change unnecessarily
# weight 232.640
alg straw
hash 0 # rjenkins1
item OSD-02 weight 116.320
item OSD-04 weight 116.320
}
host OSD-07 {
id -11 # do not change unnecessarily
# weight 72.800
alg straw
hash 0 # rjenkins1
item osd.66 weight 3.640
item osd.67 weight 3.640
item osd.68 weight 3.640
item osd.69 weight 3.640
item osd.70 weight 3.640
item osd.71 weight 3.640
item osd.94 weight 3.640
item osd.109 weight 3.640
item osd.112 weight 3.640
item osd.113 weight 3.640
item osd.114 weight 3.640
item osd.115 weight 3.640
item osd.120 weight 3.640
item osd.121 weight 3.640
item osd.122 weight 3.640
item osd.123 weight 3.640
item osd.128 weight 3.640
item osd.129 weight 3.640
item osd.130 weight 3.640
item osd.131 weight 3.640
}
host OSD-08 {
id -12 # do not change unnecessarily
# weight 72.800
alg straw
hash 0 # rjenkins1
item osd.73 weight 3.640
item osd.74 weight 3.640
item osd.75 weight 3.640
item osd.76 weight 3.640
item osd.78 weight 3.640
item osd.79 weight 3.640
item osd.81 weight 3.640
item osd.87 weight 3.640
item osd.116 weight 3.640
item osd.117 weight 3.640
item osd.118 weight 3.640
item osd.119 weight 3.640
item osd.124 weight 3.640
item osd.125 weight 3.640
item osd.126 weight 3.640
item osd.127 weight 3.640
item osd.132 weight 3.640
item osd.133 weight 3.640
item osd.134 weight 3.640
item osd.135 weight 3.640
}
rack RACK-4 {
id -13 # do not change unnecessarily
# weight 145.600
alg straw
hash 0 # rjenkins1
item OSD-07 weight 72.800
item OSD-08 weight 72.800
}
root default {
id -1 # do not change unnecessarily
# weight 843.519
alg straw
hash 0 # rjenkins1
item RACK-1 weight 232.640
item RACK-2 weight 232.640
item RACK-3 weight 232.640
item RACK-4 weight 145.600
}
# rules
rule data {
ruleset 0
type replicated
min_size 1
max_size 10
step take default
step choose firstn 3 type rack
step chooseleaf firstn 1 type host
step emit
}
# end crush map

CEPH OSD TREE:
# id weight type name up/down reweight
-1 843.5 root default
-8 232.6 rack RACK-1
-2 116.3 host OSD-01
0 7.27 osd.0 up 1
1 7.27 osd.1 up 1
3 7.27 osd.3 up 1
5 7.27 osd.5 up 1
6 7.27 osd.6 up 1
7 7.27 osd.7 up 1
8 7.27 osd.8 up 1
10 7.27 osd.10 up 1
11 7.27 osd.11 up 1
12 7.27 osd.12 up 1
13 7.27 osd.13 up 1
15 7.27 osd.15 up 1
16 7.27 osd.16 up 1
17 7.27 osd.17 up 1
18 7.27 osd.18 up 1
2 7.27 osd.2 up 1
-6 116.3 host OSD-05
80 7.27 osd.80 up 1
82 7.27 osd.82 up 1
83 7.27 osd.83 up 1
92 7.27 osd.92 up 1
93 7.27 osd.93 up 1
95 7.27 osd.95 up 1
84 7.27 osd.84 up 1
85 7.27 osd.85 up 1
86 7.27 osd.86 up 1
88 7.27 osd.88 up 1
89 7.27 osd.89 up 1
90 7.27 osd.90 up 1
91 7.27 osd.91 up 1
63 7.27 osd.63 up 1
62 7.27 osd.62 up 1
65 7.27 osd.65 up 1
-9 232.6 rack RACK-2
-4 116.3 host OSD-03
4 7.27 osd.4 up 1
40 7.27 osd.40 up 1
41 7.27 osd.41 up 1
42 7.27 osd.42 up 1
28 7.27 osd.28 up 1
29 7.27 osd.29 up 1
43 7.27 osd.43 up 1
44 7.27 osd.44 up 1
33 7.27 osd.33 up 1
34 7.27 osd.34 up 0.8302
49 7.27 osd.49 up 1
50 7.27 osd.50 up 1
38 7.27 osd.38 up 1
39 7.27 osd.39 up 1
55 7.27 osd.55 up 1
56 7.27 osd.56 up 1
-7 116.3 host OSD-06
96 7.27 osd.96 up 1
97 7.27 osd.97 up 1
98 7.27 osd.98 up 1
99 7.27 osd.99 up 1
100 7.27 osd.100 up 1
101 7.27 osd.101 up 1
102 7.27 osd.102 up 1
103 7.27 osd.103 up 1
107 7.27 osd.107 up 1
108 7.27 osd.108 up 1
110 7.27 osd.110 up 1
111 7.27 osd.111 up 1
104 7.27 osd.104 up 1
105 7.27 osd.105 up 1
106 7.27 osd.106 up 1
61 7.27 osd.61 up 1
-10 232.6 rack RACK-3
-3 116.3 host OSD-02
20 7.27 osd.20 up 1
21 7.27 osd.21 up 1
22 7.27 osd.22 up 1
23 7.27 osd.23 up 1
9 7.27 osd.9 up 1
25 7.27 osd.25 up 1
26 7.27 osd.26 up 1
27 7.27 osd.27 up 1
14 7.27 osd.14 up 1
30 7.27 osd.30 up 1
31 7.27 osd.31 up 1
32 7.27 osd.32 up 1
19 7.27 osd.19 up 1
35 7.27 osd.35 up 1
36 7.27 osd.36 up 0.7939
37 7.27 osd.37 up 0.8274
-5 116.3 host OSD-04
24 7.27 osd.24 up 1
64 7.27 osd.64 up 1
72 7.27 osd.72 up 1
77 7.27 osd.77 up 1
45 7.27 osd.45 up 1
46 7.27 osd.46 up 1
47 7.27 osd.47 up 1
48 7.27 osd.48 up 1
51 7.27 osd.51 up 1
52 7.27 osd.52 up 1
53 7.27 osd.53 up 1
54 7.27 osd.54 up 1
57 7.27 osd.57 up 1
58 7.27 osd.58 up 1
59 7.27 osd.59 up 1
60 7.27 osd.60 up 1
-13 145.6 rack RACK-4
-11 72.8 host OSD-07
66 3.64 osd.66 up 0.05103
67 3.64 osd.67 up 0.04135
68 3.64 osd.68 up 0.04774
69 3.64 osd.69 up 0.07388
70 3.64 osd.70 up 0.04306
71 3.64 osd.71 up 0.07347
94 3.64 osd.94 up 0.06604
109 3.64 osd.109 up 0.03926
112 3.64 osd.112 up 0.04701
113 3.64 osd.113 up 0.05846
114 3.64 osd.114 up 0.03975
115 3.64 osd.115 up 0.03624
120 3.64 osd.120 up 0.03993
121 3.64 osd.121 up 0.04449
122 3.64 osd.122 up 0.05862
123 3.64 osd.123 up 0.06563
128 3.64 osd.128 up 0.05992
129 3.64 osd.129 up 0.04529
130 3.64 osd.130 up 0.05048
131 3.64 osd.131 up 0.04294
-12 72.8 host OSD-08
73 3.64 osd.73 up 0.05293
74 3.64 osd.74 up 0.05865
75 3.64 osd.75 up 0.04553
76 3.64 osd.76 up 0.03278
78 3.64 osd.78 up 0.05333
79 3.64 osd.79 up 0.05684
81 3.64 osd.81 up 0.05049
87 3.64 osd.87 up 0.03659
116 3.64 osd.116 up 0.0403
117 3.64 osd.117 up 0.06807
118 3.64 osd.118 up 0.04045
119 3.64 osd.119 up 0.03661
124 3.64 osd.124 up 0.05888
125 3.64 osd.125 up 0.05049
126 3.64 osd.126 up 0.05513
127 3.64 osd.127 up 0.04015
132 3.64 osd.132 up 0.05896
133 3.64 osd.133 up 0.0531
134 3.64 osd.134 up 0.07033
135 3.64 osd.135 up 0.04817
_______________________________________________ ceph-users mailing list ceph-users@xxxxxxxxxxxxxx http://lists.ceph.com/listinfo.cgi/ceph-users-ceph.com