
Commit 5016647

FR84: Add "allocation" and "disable" thresholds per storage pool (apache#526)
Introduces multiple scope configuration parameters by changing the column `scope` of table `configuration` from string to bitmask. Following configuration parameters can now be configured at a Storage Pool scope in addition to the Zone scope. 1. **pool.storage.capacity.disablethreshold**: Percentage (as a value between 0 and 1) of storage utilization or used space relative to the total storage capacity above which the storage pool is disabled for new allocations. migration and resize operations. 2. **pool.storage.allocated.capacity.disablethreshold**: Percentage (as a value between 0 and 1) of allocated storage space relative to total overprovisioned storage capacity above which the storage pool is disabled for new allocations, migration and resize operations. 3. **pool.storage.allocated.resize.capacity.disablethreshold**: Percentage (as a value between 0 and 1) of allocated storage space relative to total overprovisioned storage capacity above which the storage pool is disabled for volume resize. This is applicable only when volume.resize.allowed.beyond.allocation is set to true 4. **volume.resize.allowed.beyond.allocation**: Specifies whether a volume can be resized beyond the pool capacity allocation disable threshold (pool.storage.allocated.capacity.disablethreshold) but not exceeding the resize capacity disable threshold (pool.storage.allocated.resize.capacity.disablethreshold). Please note that resize due to change of disk offering still honours pool capacity allocation disable threshold (pool.storage.allocated.capacity.disablethreshold). If set, the granular scope (storage pool) overrides the value set for the broader scope (zone). If set, the granular scope (storage pool) overrides the value set for the broader scope (zone). The operator will see these configurations in the Settings tab under zone as well as under primary storage. listConfigurations storageid=<storage_id> | zoneid=<zone_id> and updateConfigurations storage_id=<storage_id> | zoneid=<zone_id> will also work. Upstream PR: apache#10300 --------- Co-authored-by: Harikrishna Patnala <[email protected]>
1 parent c8fad18 commit 5016647
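A minimal sketch of how the override described in the commit message could be consulted by an allocator. It assumes the CapacityManager key shown in the diff below is named StorageCapacityDisableThreshold and that ConfigKey.valueIn(id) resolves the most granular configured value (storage pool here) before falling back to the zone value and the global default; the helper class and the usedFraction argument are illustrative only.

import com.cloud.capacity.CapacityManager;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;

public class PoolThresholdSketch {

    // True when the pool should be skipped for new allocations, migration
    // and resize; usedFraction is used space / total capacity of the pool.
    static boolean disabledForAllocation(StoragePoolVO pool, double usedFraction) {
        // Assumed resolution order: StoragePool setting if present,
        // otherwise Zone setting, otherwise the default "0.85".
        Double threshold = CapacityManager.StorageCapacityDisableThreshold.valueIn(pool.getId());
        return usedFraction > threshold;
    }
}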

34 files changed, +792 -109 lines changed

engine/components-api/src/main/java/com/cloud/capacity/CapacityManager.java

Lines changed: 5 additions & 3 deletions
@@ -16,6 +16,8 @@
 // under the License.
 package com.cloud.capacity;
 
+import java.util.List;
+
 import org.apache.cloudstack.framework.config.ConfigKey;
 import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
 
@@ -67,7 +69,7 @@ public interface CapacityManager {
                     "0.85",
                     "Percentage (as a value between 0 and 1) of storage utilization above which allocators will disable using the pool for low storage available.",
                     true,
-                    ConfigKey.Scope.Zone);
+                    List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
     static final ConfigKey<Double> StorageOverprovisioningFactor =
             new ConfigKey<>(
                     "Storage",
@@ -85,7 +87,7 @@ public interface CapacityManager {
                     "0.85",
                     "Percentage (as a value between 0 and 1) of allocated storage utilization above which allocators will disable using the pool for low allocated storage available.",
                     true,
-                    ConfigKey.Scope.Zone);
+                    List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
     static final ConfigKey<Boolean> StorageOperationsExcludeCluster =
             new ConfigKey<>(
                     Boolean.class,
@@ -125,7 +127,7 @@ public interface CapacityManager {
                     "Percentage (as a value between 0 and 1) of allocated storage utilization above which allocators will disable using the pool for volume resize. " +
                     "This is applicable only when volume.resize.allowed.beyond.allocation is set to true.",
                     true,
-                    ConfigKey.Scope.Zone);
+                    List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
 
     ConfigKey<Integer> CapacityCalculateWorkers = new ConfigKey<>(ConfigKey.CATEGORY_ADVANCED, Integer.class,
             "capacity.calculate.workers", "1",

engine/components-api/src/main/java/com/cloud/storage/StorageManager.java

Lines changed: 1 addition & 1 deletion
@@ -211,7 +211,7 @@ public interface StorageManager extends StorageService {
     ConfigKey<Boolean> AllowVolumeReSizeBeyondAllocation = new ConfigKey<Boolean>("Advanced", Boolean.class, "volume.resize.allowed.beyond.allocation", "false",
             "Determines whether volume size can exceed the pool capacity allocation disable threshold (pool.storage.allocated.capacity.disablethreshold) " +
             "when resize a volume upto resize capacity disable threshold (pool.storage.allocated.resize.capacity.disablethreshold)",
-            true, ConfigKey.Scope.Zone);
+            true, List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
 
     ConfigKey<Integer> StoragePoolHostConnectWorkers = new ConfigKey<>("Storage", Integer.class,
             "storage.pool.host.connect.workers", "1",

engine/schema/src/main/java/com/cloud/dc/ClusterDetailsDaoImpl.java

Lines changed: 18 additions & 0 deletions
@@ -22,18 +22,27 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 
+import javax.inject.Inject;
+
 import org.apache.cloudstack.framework.config.ConfigKey;
 import org.apache.cloudstack.framework.config.ConfigKey.Scope;
 import org.apache.cloudstack.framework.config.ScopedConfigStorage;
 import org.apache.commons.collections.CollectionUtils;
 
+import com.cloud.dc.dao.ClusterDao;
+import com.cloud.org.Cluster;
+import com.cloud.utils.Pair;
 import com.cloud.utils.crypt.DBEncryptionUtil;
 import com.cloud.utils.db.GenericDaoBase;
 import com.cloud.utils.db.SearchBuilder;
 import com.cloud.utils.db.SearchCriteria;
 import com.cloud.utils.db.TransactionLegacy;
 
 public class ClusterDetailsDaoImpl extends GenericDaoBase<ClusterDetailsVO, Long> implements ClusterDetailsDao, ScopedConfigStorage {
+
+    @Inject
+    ClusterDao clusterDao;
+
     protected final SearchBuilder<ClusterDetailsVO> ClusterSearch;
     protected final SearchBuilder<ClusterDetailsVO> DetailSearch;
 
@@ -180,4 +189,13 @@ private String getCpuMemoryOvercommitRatio(String name) {
 
         return name;
     }
+
+    @Override
+    public Pair<Scope, Long> getParentScope(long id) {
+        Cluster cluster = clusterDao.findById(id);
+        if (cluster == null) {
+            return null;
+        }
+        return new Pair<>(getScope().getParent(), cluster.getDataCenterId());
+    }
 }

engine/schema/src/main/java/com/cloud/storage/dao/StoragePoolDetailsDaoImpl.java

Lines changed: 15 additions & 0 deletions
@@ -30,6 +30,8 @@
 import org.apache.cloudstack.storage.datastore.db.StoragePoolDetailsDao;
 import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
 
+import com.cloud.utils.Pair;
+
 public class StoragePoolDetailsDaoImpl extends ResourceDetailsDaoBase<StoragePoolDetailVO> implements StoragePoolDetailsDao, ScopedConfigStorage {
 
     @Inject
@@ -57,4 +59,17 @@ public void addDetail(long resourceId, String key, String value, boolean display
         }
         super.addDetail(new StoragePoolDetailVO(resourceId, key, value, display));
     }
+
+    @Override
+    public Pair<Scope, Long> getParentScope(long id) {
+        StoragePoolVO pool = _storagePoolDao.findById(id);
+        if (pool != null) {
+            if (pool.getClusterId() != null) {
+                return new Pair<>(getScope().getParent(), pool.getClusterId());
+            } else {
+                return new Pair<>(ConfigKey.Scope.Zone, pool.getDataCenterId());
+            }
+        }
+        return null;
+    }
 }
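The getParentScope addition above is what lets a lookup climb from a storage pool to its cluster, or straight to the zone for cluster-less pools. A rough illustration of that fallback walk, assuming ScopedConfigStorage exposes getConfigValue(id, key) alongside the new getParentScope(id); the storageFor resolver is hypothetical and stands in for however the config depot maps a scope to its backing detail DAO.

import com.cloud.utils.Pair;
import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.ConfigKey.Scope;
import org.apache.cloudstack.framework.config.ScopedConfigStorage;

public class ScopeFallbackSketch {

    // Try the most granular scope first, then follow getParentScope()
    // (StoragePool -> Cluster -> Zone) until a configured value is found.
    static String effectiveValue(ScopedConfigStorage storage, long id, ConfigKey<?> key) {
        String value = storage.getConfigValue(id, key);
        while (value == null) {
            Pair<Scope, Long> parent = storage.getParentScope(id);
            if (parent == null) {
                return null;
            }
            storage = storageFor(parent.first());   // hypothetical scope-to-DAO resolver
            id = parent.second();
            value = storage.getConfigValue(id, key);
        }
        return value;
    }

    private static ScopedConfigStorage storageFor(Scope scope) {
        throw new UnsupportedOperationException("scope-to-storage resolution is omitted in this sketch");
    }
}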

engine/schema/src/main/java/com/cloud/upgrade/ConfigurationGroupsAggregator.java

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@ public ConfigurationGroupsAggregator() {
 
     public void updateConfigurationGroups() {
         LOG.debug("Updating configuration groups");
-        List<ConfigurationVO> configs = configDao.listAllIncludingRemoved();
+        List<ConfigurationVO> configs = configDao.searchPartialConfigurations();
         if (CollectionUtils.isEmpty(configs)) {
             return;
         }

engine/schema/src/main/java/com/cloud/upgrade/dao/DatabaseAccessObject.java

Lines changed: 30 additions & 0 deletions
@@ -90,6 +90,36 @@ public String generateIndexName(String tableName, String... columnNames) {
         return String.format("i_%s__%s", tableName, StringUtils.join(columnNames, "__"));
     }
 
+    public String getColumnType(Connection conn, String tableName, String columnName) {
+        try (PreparedStatement pstmt = conn.prepareStatement(String.format("DESCRIBE %s %s", tableName, columnName));){
+            ResultSet rs = pstmt.executeQuery();
+            if (rs.next()) {
+                return rs.getString("Type");
+            }
+        } catch (SQLException e) {
+            s_logger.warn("Type for column " + columnName + " can not be retrieved in " + tableName + " ignoring exception: " + e.getMessage());
+        }
+        return null;
+    }
+
+    public void addColumn(Connection conn, String tableName, String columnName, String columnDefinition) {
+        try (PreparedStatement pstmt = conn.prepareStatement(String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, columnName, columnDefinition));){
+            pstmt.executeUpdate();
+            s_logger.debug("Column " + columnName + " is added successfully from the table " + tableName);
+        } catch (SQLException e) {
+            s_logger.warn("Unable to add column " + columnName + " to table " + tableName + " due to exception ", e);
+        }
+    }
+
+    public void changeColumn(Connection conn, String tableName, String oldColumnName, String newColumnName, String columnDefinition) {
+        try (PreparedStatement pstmt = conn.prepareStatement(String.format("ALTER TABLE %s CHANGE COLUMN %s %s %s", tableName, oldColumnName, newColumnName, columnDefinition));){
+            pstmt.executeUpdate();
+            s_logger.debug("Column " + oldColumnName + " is changed successfully to " + newColumnName + " from the table " + tableName);
+        } catch (SQLException e) {
+            s_logger.warn("Unable to add column " + oldColumnName + " to " + newColumnName + " from the table " + tableName + " due to exception", e);
+        }
+    }
+
     public boolean indexExists(Connection conn, String tableName, String indexName) {
         try (PreparedStatement pstmt = conn.prepareStatement(String.format("SHOW INDEXES FROM %s where Key_name = \"%s\"", tableName, indexName))) {
             ResultSet result = pstmt.executeQuery();

engine/schema/src/main/java/com/cloud/upgrade/dao/DbUpgradeUtils.java

Lines changed: 16 additions & 0 deletions
@@ -58,4 +58,20 @@ public static void dropTableColumnsIfExist(Connection conn, String tableName, Li
         }
     }
 
+    public static String getTableColumnType(Connection conn, String tableName, String columnName) {
+        return dao.getColumnType(conn, tableName, columnName);
+    }
+
+    public static void addTableColumnIfNotExist(Connection conn, String tableName, String columnName, String columnDefinition) {
+        if (!dao.columnExists(conn, tableName, columnName)) {
+            dao.addColumn(conn, tableName, columnName, columnDefinition);
+        }
+    }
+
+    public static void changeTableColumnIfNotExist(Connection conn, String tableName, String oldColumnName, String newColumnName, String columnDefinition) {
+        if (dao.columnExists(conn, tableName, oldColumnName)) {
+            dao.changeColumn(conn, tableName, oldColumnName, newColumnName, columnDefinition);
+        }
+    }
+
 }

engine/schema/src/main/java/com/cloud/upgrade/dao/Upgrade41811to41812.java

Lines changed: 38 additions & 0 deletions
@@ -16,11 +16,17 @@
 // under the License.
 package com.cloud.upgrade.dao;
 
+import com.cloud.utils.db.TransactionLegacy;
 import com.cloud.utils.exception.CloudRuntimeException;
+
+import org.apache.cloudstack.framework.config.ConfigKey;
 import org.apache.log4j.Logger;
 
 import java.io.InputStream;
 import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.List;
 
 public class Upgrade41811to41812 implements DbUpgrade {
     final static Logger LOG = Logger.getLogger(Upgrade41811to41812.class);
@@ -53,6 +59,7 @@ public InputStream[] getPrepareScripts() {
 
     @Override
     public void performDataMigration(Connection conn) {
+        migrateConfigurationScopeToBitmask(conn);
     }
 
 
@@ -66,4 +73,35 @@ public InputStream[] getCleanupScripts() {
 
         return new InputStream[]{script};
     }
+
+    protected void migrateConfigurationScopeToBitmask(Connection conn) {
+        String scopeDataType = DbUpgradeUtils.getTableColumnType(conn, "configuration", "scope");
+        LOG.info("Data type of the column scope of table configuration is " + scopeDataType);
+        if (!"varchar(255)".equals(scopeDataType)) {
+            return;
+        }
+        DbUpgradeUtils.addTableColumnIfNotExist(conn, "configuration", "new_scope", "BIGINT DEFAULT 0");
+        migrateExistingConfigurationScopeValues(conn);
+        DbUpgradeUtils.dropTableColumnsIfExist(conn, "configuration", List.of("scope"));
+        DbUpgradeUtils.changeTableColumnIfNotExist(conn, "configuration", "new_scope", "scope", "BIGINT NOT NULL DEFAULT 0 COMMENT 'Bitmask for scope(s) of this parameter'");
+    }
+
+    protected void migrateExistingConfigurationScopeValues(Connection conn) {
+        StringBuilder sql = new StringBuilder("UPDATE configuration\n" +
+                "SET new_scope = " +
+                "    CASE ");
+        for (ConfigKey.Scope scope : ConfigKey.Scope.values()) {
+            sql.append(" WHEN scope = '").append(scope.name()).append("' THEN ").append(scope.getBitValue()).append(" ");
+        }
+        sql.append(" ELSE 0 " +
+                "    END " +
+                "WHERE scope IS NOT NULL;");
+        TransactionLegacy txn = TransactionLegacy.currentTxn();
+        try (PreparedStatement pstmt = txn.prepareAutoCloseStatement(sql.toString())) {
+            pstmt.executeUpdate();
+        } catch (SQLException e) {
+            LOG.error("Failed to migrate existing configuration scope values to bitmask", e);
+            throw new CloudRuntimeException(String.format("Failed to migrate existing configuration scope values to bitmask due to: %s", e.getMessage()));
+        }
+    }
 }

engine/schema/src/main/java/com/cloud/user/AccountDetailsDaoImpl.java

Lines changed: 10 additions & 0 deletions
@@ -33,6 +33,7 @@
 import com.cloud.domain.dao.DomainDao;
 import com.cloud.domain.dao.DomainDetailsDao;
 import com.cloud.user.dao.AccountDao;
+import com.cloud.utils.Pair;
 import com.cloud.utils.db.GenericDaoBase;
 import com.cloud.utils.db.QueryBuilder;
 import com.cloud.utils.db.SearchBuilder;
@@ -151,4 +152,13 @@ public String getConfigValue(long id, String key) {
         }
         return value;
     }
+
+    @Override
+    public Pair<Scope, Long> getParentScope(long id) {
+        Account account = _accountDao.findById(id);
+        if (account == null) {
+            return null;
+        }
+        return new Pair<>(getScope().getParent(), account.getDomainId());
+    }
 }

engine/schema/src/main/java/org/apache/cloudstack/storage/datastore/db/ImageStoreDetailsDaoImpl.java

Lines changed: 15 additions & 0 deletions
@@ -20,13 +20,17 @@
 import java.util.List;
 import java.util.Map;
 
+import javax.inject.Inject;
+
 import org.apache.cloudstack.api.ApiConstants;
 import org.apache.cloudstack.framework.config.ConfigKey;
 import org.apache.cloudstack.framework.config.ConfigKey.Scope;
 import org.apache.cloudstack.framework.config.ScopedConfigStorage;
 import org.apache.cloudstack.resourcedetail.ResourceDetailsDaoBase;
 import org.springframework.stereotype.Component;
 
+import com.cloud.storage.ImageStore;
+import com.cloud.utils.Pair;
 import com.cloud.utils.crypt.DBEncryptionUtil;
 import com.cloud.utils.db.QueryBuilder;
 import com.cloud.utils.db.SearchBuilder;
@@ -36,6 +40,8 @@
 
 @Component
 public class ImageStoreDetailsDaoImpl extends ResourceDetailsDaoBase<ImageStoreDetailVO> implements ImageStoreDetailsDao, ScopedConfigStorage {
+    @Inject
+    ImageStoreDao imageStoreDao;
 
     protected final SearchBuilder<ImageStoreDetailVO> storeSearch;
 
@@ -115,4 +121,13 @@ public void addDetail(long resourceId, String key, String value, boolean display
         super.addDetail(new ImageStoreDetailVO(resourceId, key, value, display));
     }
 
+    @Override
+    public Pair<Scope, Long> getParentScope(long id) {
+        ImageStore store = imageStoreDao.findById(id);
+        if (store == null) {
+            return null;
+        }
+        return new Pair<>(getScope().getParent(), store.getDataCenterId());
+    }
+
 }
Lines changed: 70 additions & 0 deletions
@@ -0,0 +1,70 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+package com.cloud.upgrade;
+
+import static org.mockito.Mockito.when;
+
+import java.util.Collections;
+
+import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
+import org.apache.cloudstack.framework.config.dao.ConfigurationGroupDao;
+import org.apache.cloudstack.framework.config.dao.ConfigurationSubGroupDao;
+import org.apache.cloudstack.framework.config.impl.ConfigurationSubGroupVO;
+import org.apache.cloudstack.framework.config.impl.ConfigurationVO;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.class)
+public class ConfigurationGroupsAggregatorTest {
+    @InjectMocks
+    private ConfigurationGroupsAggregator configurationGroupsAggregator = new ConfigurationGroupsAggregator();
+
+    @Mock
+    private ConfigurationDao configDao;
+
+    @Mock
+    private ConfigurationGroupDao configGroupDao;
+
+    @Mock
+    private ConfigurationSubGroupDao configSubGroupDao;
+
+    @Test
+    public void testUpdateConfigurationGroups() {
+        ConfigurationVO config = new ConfigurationVO("Advanced", "DEFAULT", "management-server",
+                "test.config.name", null, "description");
+        config.setGroupId(1L);
+        config.setSubGroupId(1L);
+
+        when(configDao.searchPartialConfigurations()).thenReturn(Collections.singletonList(config));
+
+        ConfigurationSubGroupVO configSubGroup = Mockito.mock(ConfigurationSubGroupVO.class);
+        when(configSubGroupDao.findByName("name")).thenReturn(configSubGroup);
+        Mockito.when(configSubGroup.getId()).thenReturn(10L);
+        Mockito.when(configSubGroup.getGroupId()).thenReturn(5L);
+
+        configurationGroupsAggregator.updateConfigurationGroups();
+
+        Assert.assertEquals(Long.valueOf(5), config.getGroupId());
+        Assert.assertEquals(Long.valueOf(10), config.getSubGroupId());
+        Mockito.verify(configDao, Mockito.times(1)).persist(config);
+    }
+}
