package io.ebeaninternal.dbmigration.ddlgeneration.platform;

import io.ebean.config.dbplatform.DatabasePlatform;
import io.ebean.config.dbplatform.DbPlatformType;
import io.ebeaninternal.dbmigration.ddlgeneration.DdlAlterTable;
import io.ebeaninternal.dbmigration.ddlgeneration.DdlBuffer;
import io.ebeaninternal.dbmigration.ddlgeneration.DdlWrite;
import io.ebeaninternal.dbmigration.migration.AlterColumn;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Common DDL generation for the SAP HANA database platform.
 * <p>
 * Configures the HANA-specific keywords/fragments used by the base
 * {@link PlatformDdl} and overrides the operations where HANA syntax or
 * limitations differ (column alteration with intermediate type conversions,
 * tolerant constraint drops via SQLScript blocks, batched alter-table
 * statements). Concrete store-specific subclasses are expected to extend this
 * class (not visible in this file).
 */
public abstract class AbstractHanaDdl extends PlatformDdl {

  // Matches a logical array type such as "varchar[]" or "varchar [] (90)":
  // group 1 = element type, group 2 = optional "(length)" suffix.
  private static final Pattern ARRAY_PATTERN = Pattern.compile("(\\w+)\\s*\\[\\s*\\]\\s*(\\(\\d+\\))?", Pattern.CASE_INSENSITIVE);

  public AbstractHanaDdl(DatabasePlatform platform) {
    super(platform);
    // HANA keyword fragments used by the base class when composing DDL.
    this.addColumn = "add";
    this.alterColumn = "alter";
    this.columnDropDefault = "default null";
    this.columnSetDefault = "default";
    this.columnSetNotnull = "not null";
    this.columnSetNull = " null";
    this.dropColumn = "drop";
    this.dropConstraintIfExists = "drop constraint ";
    this.dropIndexIfExists = "drop index ";
    this.dropSequenceIfExists = "drop sequence ";
    this.dropTableCascade = " cascade";
    this.dropTableIfExists = "drop table ";
    this.fallbackArrayType = "nvarchar(1000)";
    this.historyDdl = new HanaHistoryDdl();
    this.identitySuffix = " generated by default as identity";
  }

  /**
   * Write the DDL to alter a column's type, default value and nullability.
   * <p>
   * When the current type cannot be converted directly to the target type
   * (see {@link #isConvertible(String, String)}), an intermediate conversion
   * to a "widest" type of the same family (decimal for numbers, nclob for
   * strings) is emitted first, followed by the alter to the final type.
   */
  @Override
  public void alterColumn(DdlWrite writer, AlterColumn alter) {
    String tableName = alter.getTableName();
    String columnName = alter.getColumnName();
    String currentType = alter.getCurrentType();
    // Fall back to the current type/nullability/default when the migration
    // only changes some of the column attributes.
    String type = alter.getType() != null ? alter.getType() : currentType;
    type = convert(type);
    currentType = convert(currentType);
    boolean notnull = (alter.isNotnull() != null) ? alter.isNotnull() : Boolean.TRUE.equals(alter.isCurrentNotnull());
    String notnullClause = notnull ? " not null" : "";
    // Dropping the default is expressed as "default null" on HANA.
    String defaultValue = DdlHelp.isDropDefault(alter.getDefaultValue()) ? "null"
      : (alter.getDefaultValue() != null ? alter.getDefaultValue() : alter.getCurrentDefaultValue());
    String defaultValueClause = (defaultValue == null || defaultValue.isEmpty()) ? "" : " default " + defaultValue;

    if (!isConvertible(currentType, type)) {
      // add an intermediate conversion if possible
      if (isNumberType(currentType)) {
        // numbers can always be converted to decimal
        alterTable(writer, tableName).append(alterColumn, columnName).append("decimal").append(notnullClause);

      } else if (isStringType(currentType)) {
        // strings can always be converted to nclob
        // Note: we do not add default clause here to avoid error[SAP DBTech JDBC: [336]: invalid default value:
        // default value cannot be created on column of data type NCLOB
        alterTable(writer, tableName).append(alterColumn, columnName).append("nclob").append(notnullClause);
      }
      // Other type families get no intermediate step; the direct alter below
      // is attempted as-is.
    }

    alterTable(writer, tableName).append(alterColumn, columnName).append(type).append(defaultValueClause).append(notnullClause);
  }

  /**
   * Convert a logical array type (e.g. "varchar[](90)") to the HANA
   * "type array(n)" form, or to {@code fallbackArrayType} when the logical
   * type does not match the expected pattern.
   */
  @Override
  protected String convertArrayType(String logicalArrayType) {
    Matcher matcher = ARRAY_PATTERN.matcher(logicalArrayType);
    if (matcher.matches()) {
      return convert(matcher.group(1)) + " array" + (matcher.group(2) == null ? "" : matcher.group(2));
    } else {
      return fallbackArrayType;
    }
  }

  /**
   * HANA does not support unique constraints involving nullable columns, so
   * in that case only an explanatory SQL comment is emitted instead of DDL.
   */
  @Override
  public String alterTableAddUniqueConstraint(String tableName, String uqName, String[] columns, String[] nullableColumns) {
    if (nullableColumns == null || nullableColumns.length == 0) {
      return super.alterTableAddUniqueConstraint(tableName, uqName, columns, nullableColumns);
    } else {
      return "-- cannot create unique index \"" + uqName + "\" on table \"" + tableName + "\" with nullable columns";
    }
  }

  /**
   * Drop a unique constraint tolerantly by wrapping the alter statement in an
   * anonymous SQLScript block whose exit handler swallows sql_error_code 397
   * (presumably raised when the constraint does not exist — TODO confirm
   * against the HANA error code reference).
   */
  @Override
  public String alterTableDropUniqueConstraint(String tableName, String uniqueConstraintName) {
    DdlBuffer buffer = new BaseDdlBuffer();

    buffer.append("delimiter $$").newLine();
    buffer.append("do").newLine();
    buffer.append("begin").newLine();
    buffer.append("declare exit handler for sql_error_code 397 begin end").endOfStatement();
    buffer.append("exec 'alter table ").append(tableName).append(" ").append(dropUniqueConstraint).append(" ")
      .append(maxConstraintName(uniqueConstraintName)).append("'").endOfStatement();
    buffer.append("end").endOfStatement();
    buffer.append("$$");
    return buffer.getBuffer();
  }

  /**
   * Dropping a generic constraint uses the same tolerant SQLScript block as
   * dropping a unique constraint.
   */
  @Override
  public String alterTableDropConstraint(String tableName, String constraintName) {
    return alterTableDropUniqueConstraint(tableName, constraintName);
  }

  /**
   * It is rather complex to delete a column on HANA as there must not exist any
   * foreign keys. That's why we call a user stored procedure here
   * (usp_ebean_drop_column — expected to be installed on the database;
   * its definition is not part of this file).
   */
  @Override
  public void alterTableDropColumn(DdlWrite writer, String tableName, String columnName) {
    alterTable(writer, tableName).raw("CALL usp_ebean_drop_column('").append(tableName).append("', '").append(columnName).append("')");
  }

  /**
   * Check if a data type can be converted to another data type. Data types can't
   * be converted if the target type has a lower precision than the source type.
   * <p>
   * Unknown or {@code null} types are optimistically treated as convertible.
   *
   * @param sourceType The source data type
   * @param targetType the target data type
   * @return {@code true} if the type can be converted, {@code false} otherwise
   */
  private boolean isConvertible(String sourceType, String targetType) {
    if (Objects.equals(sourceType, targetType)) {
      return true;
    }

    if (sourceType == null || targetType == null) {
      return true;
    }

    // Narrowing integer/floating conversions are not allowed.
    if ("bigint".equals(sourceType)) {
      if ("integer".equals(targetType) || "smallint".equals(targetType) || "tinyint".equals(targetType)) {
        return false;
      }
    } else if ("integer".equals(sourceType)) {
      if ("smallint".equals(targetType) || "tinyint".equals(targetType)) {
        return false;
      }
    } else if ("smallint".equals(sourceType)) {
      if ("tinyint".equals(targetType)) {
        return false;
      }
    } else if ("double".equals(sourceType)) {
      if ("real".equals(targetType)) {
        return false;
      }
    }

    // Parse parameterized types (e.g. "varchar(255)", "decimal(16,3)") to
    // compare base name, length and scale.
    DbPlatformType dbPlatformSourceType = DbPlatformType.parse(sourceType);

    if ("float".equals(dbPlatformSourceType.getName())) {
      if ("real".equals(targetType)) {
        return false;
      }
    } else if ("varchar".equals(dbPlatformSourceType.getName()) || "nvarchar".equals(dbPlatformSourceType.getName())) {
      DbPlatformType dbPlatformTargetType = DbPlatformType.parse(targetType);
      if ("varchar".equals(dbPlatformTargetType.getName()) || "nvarchar".equals(dbPlatformTargetType.getName())) {
        // Cannot shrink the character length.
        if (dbPlatformSourceType.getDefaultLength() > dbPlatformTargetType.getDefaultLength()) {
          return false;
        }
      }
    } else if ("decimal".equals(dbPlatformSourceType.getName())) {
      DbPlatformType dbPlatformTargetType = DbPlatformType.parse(targetType);
      if ("decimal".equals(dbPlatformTargetType.getName())) {
        // Cannot shrink precision or scale.
        if (dbPlatformSourceType.getDefaultLength() > dbPlatformTargetType.getDefaultLength()
          || dbPlatformSourceType.getDefaultScale() > dbPlatformTargetType.getDefaultScale()) {
          return false;
        }
      }
    }

    return true;
  }

  /** True if the (already converted) type string is a HANA numeric type. */
  private boolean isNumberType(String type) {
    return type != null
      && ("bigint".equals(type) || "integer".equals(type) || "smallint".equals(type) || "tinyint".equals(type)
        || type.startsWith("float") || "real".equals(type) || "double".equals(type) || type.startsWith("decimal"));
  }

  /** True if the (already converted) type string is a HANA character type. */
  private boolean isStringType(String type) {
    return type != null
      && (type.startsWith("varchar") || type.startsWith("nvarchar") || "clob".equals(type) || "nclob".equals(type));
  }

  /**
   * Use the HANA-specific alter-table writer so that alter commands are
   * batched (see {@link HanaAlterTableWrite}).
   */
  @Override
  protected DdlAlterTable alterTable(DdlWrite writer, String tableName) {
    return writer.applyAlterTable(tableName, HanaAlterTableWrite::new);
  }

  /**
   * Joins alter table commands and add open/closing brackets for the alter statements
   * (HANA allows e.g. "alter table t alter (c1 ..., c2 ...)").
   */
  class HanaAlterTableWrite extends BaseAlterTableWrite {

    public HanaAlterTableWrite(String tableName) {
      super(tableName, AbstractHanaDdl.this);
    }

    /**
     * Group consecutive add/alter/drop commands into per-operation batches.
     * A batch run is flushed early when the same column appears twice (its
     * alterations must stay in separate statements to preserve order) or when
     * a non-batchable command is encountered.
     */
    @Override
    protected List<AlterCmd> postProcessCommands(List<AlterCmd> cmds) {
      List<AlterCmd> newCmds = new ArrayList<>();
      Map<String, List<AlterCmd>> batches = new LinkedHashMap<>();
      Set<String> columns = new HashSet<>();  // columns seen in the current run
      for (AlterCmd cmd : cmds) {
        switch (cmd.getOperation()) {
          case "add":
          case "alter":
          case "drop":
            if (cmd.getColumn() != null && !columns.add(cmd.getColumn())) {
              // column already seen
              flushBatches(newCmds, batches);
              columns.clear();
            }
            batches.computeIfAbsent(cmd.getOperation(), k -> new ArrayList<>()).add(cmd);
            break;
          default:
            // Non-batchable command: flush pending batches, keep it as-is.
            flushBatches(newCmds, batches);
            columns.clear();
            newCmds.add(cmd);
        }
      }
      flushBatches(newCmds, batches);
      return newCmds;
    }

    /**
     * Merges add/alter/drop commands into one statement.
     * Each batch becomes a single raw "alter table <t> <op> (col spec, ...)"
     * command; the batch map is cleared afterwards.
     */
    private void flushBatches(List<AlterCmd> newCmds, Map<String, List<AlterCmd>> batches) {
      for (Entry<String, List<AlterCmd>> entry : batches.entrySet()) {
        AlterCmd raw = newRawCommand("alter table ").append(tableName()).append(" ")
          .append(entry.getKey()).append(" (");
        List<AlterCmd> cmds = entry.getValue();
        for (int i = 0; i < cmds.size(); i++) {
          AlterCmd cmd = cmds.get(i);
          if (i > 0) {
            raw.append(",\n  ");
          }
          raw.append(cmd.getColumn());
          if (!cmd.getAlternation().isEmpty()) {
            raw.append(" ").append(cmd.getAlternation());
          }
        }
        raw.append(")");
        newCmds.add(raw);
      }
      batches.clear();
    }
  }

}