diff --git a/src/main/java/com/microsoft/lst_bench/common/SessionExecutor.java b/src/main/java/com/microsoft/lst_bench/common/SessionExecutor.java index 6e7b8074..99a62854 100644 --- a/src/main/java/com/microsoft/lst_bench/common/SessionExecutor.java +++ b/src/main/java/com/microsoft/lst_bench/common/SessionExecutor.java @@ -20,7 +20,7 @@ import com.microsoft.lst_bench.client.ConnectionManager; import com.microsoft.lst_bench.exec.SessionExec; import com.microsoft.lst_bench.exec.TaskExec; -import com.microsoft.lst_bench.input.Task.CustomTaskExecutorArguments; +import com.microsoft.lst_bench.task.TaskExecutor; import com.microsoft.lst_bench.telemetry.EventInfo; import com.microsoft.lst_bench.telemetry.EventInfo.EventType; import com.microsoft.lst_bench.telemetry.EventInfo.Status; @@ -120,8 +120,7 @@ private TaskExecutor getTaskExecutor(TaskExec task) { try { Constructor constructor = Class.forName(task.getCustomTaskExecutor()) - .getDeclaredConstructor( - SQLTelemetryRegistry.class, String.class, CustomTaskExecutorArguments.class); + .getDeclaredConstructor(SQLTelemetryRegistry.class, String.class, Map.class); return (TaskExecutor) constructor.newInstance( this.telemetryRegistry, diff --git a/src/main/java/com/microsoft/lst_bench/exec/FileExec.java b/src/main/java/com/microsoft/lst_bench/exec/FileExec.java index a02f60e6..8158f1e2 100644 --- a/src/main/java/com/microsoft/lst_bench/exec/FileExec.java +++ b/src/main/java/com/microsoft/lst_bench/exec/FileExec.java @@ -25,5 +25,7 @@ public interface FileExec { String getId(); + String getName(); + List getStatements(); } diff --git a/src/main/java/com/microsoft/lst_bench/exec/TaskExec.java b/src/main/java/com/microsoft/lst_bench/exec/TaskExec.java index 5abf0ed9..bfc2883b 100644 --- a/src/main/java/com/microsoft/lst_bench/exec/TaskExec.java +++ b/src/main/java/com/microsoft/lst_bench/exec/TaskExec.java @@ -15,8 +15,8 @@ */ package com.microsoft.lst_bench.exec; -import 
com.microsoft.lst_bench.input.Task.CustomTaskExecutorArguments; import java.util.List; +import java.util.Map; import javax.annotation.Nullable; import org.immutables.value.Value; @@ -36,5 +36,5 @@ public interface TaskExec { @Nullable String getCustomTaskExecutor(); @Value.Parameter(false) - @Nullable CustomTaskExecutorArguments getCustomTaskExecutorArguments(); + @Nullable Map getCustomTaskExecutorArguments(); } diff --git a/src/main/java/com/microsoft/lst_bench/input/BenchmarkObjectFactory.java b/src/main/java/com/microsoft/lst_bench/input/BenchmarkObjectFactory.java index 45a8c9fe..98d9d0d1 100644 --- a/src/main/java/com/microsoft/lst_bench/input/BenchmarkObjectFactory.java +++ b/src/main/java/com/microsoft/lst_bench/input/BenchmarkObjectFactory.java @@ -15,17 +15,21 @@ */ package com.microsoft.lst_bench.input; +import com.microsoft.lst_bench.client.ClientException; import com.microsoft.lst_bench.client.ConnectionManager; import com.microsoft.lst_bench.client.JDBCConnectionManager; import com.microsoft.lst_bench.client.SparkConnectionManager; import com.microsoft.lst_bench.common.BenchmarkConfig; import com.microsoft.lst_bench.exec.FileExec; +import com.microsoft.lst_bench.exec.ImmutableFileExec; import com.microsoft.lst_bench.exec.ImmutablePhaseExec; import com.microsoft.lst_bench.exec.ImmutableSessionExec; +import com.microsoft.lst_bench.exec.ImmutableStatementExec; import com.microsoft.lst_bench.exec.ImmutableTaskExec; import com.microsoft.lst_bench.exec.ImmutableWorkloadExec; import com.microsoft.lst_bench.exec.PhaseExec; import com.microsoft.lst_bench.exec.SessionExec; +import com.microsoft.lst_bench.exec.StatementExec; import com.microsoft.lst_bench.exec.TaskExec; import com.microsoft.lst_bench.input.config.ConnectionConfig; import com.microsoft.lst_bench.input.config.ExperimentConfig; @@ -41,9 +45,20 @@ import java.util.stream.Collectors; import org.apache.commons.lang3.ObjectUtils; -/** Factory class for creating benchmark objects from the input 
configuration. */ +/** + * Factory class for creating benchmark objects from the input configuration. + * + *

Per convention, the identifiers for each phase, session, task, file, and statement are + * hierarchically constructed. For example, task template 'test-task' that is only task in phase + * 'test-phase' which only has a single session will be identified as + * test-phase;session-0;test-task-0. + */ public class BenchmarkObjectFactory { + public static final String DEFAULT_ID_SEPARATOR = ";"; + public static final String DEFAULT_ID_CONNECTOR = "_"; + public static final String DEFAULT_FILE_SEPARATOR = "/"; + private BenchmarkObjectFactory() { // Defeat instantiation } @@ -92,9 +107,11 @@ private static SparkConnectionManager sparkConnectionManager( * @param taskLibrary the task library * @param workload the workload * @return a benchmark configuration + * @throws ClientException */ public static BenchmarkConfig benchmarkConfig( - ExperimentConfig experimentConfig, TaskLibrary taskLibrary, Workload workload) { + ExperimentConfig experimentConfig, TaskLibrary taskLibrary, Workload workload) + throws ClientException { Map idToTaskTemplate = parseTaskLibrary(taskLibrary); ImmutableWorkloadExec workloadExec = createWorkloadExec(workload, idToTaskTemplate, experimentConfig); @@ -131,12 +148,13 @@ private static Map parseTaskLibrary(TaskLibrary taskLibrar * @param idToTaskTemplate a map of task templates with unique IDs * @param experimentConfig the experiment configuration * @return a workload execution - * @throws IllegalArgumentException if the workload contains an invalid task template ID + * @throws ClientException */ private static ImmutableWorkloadExec createWorkloadExec( Workload workload, Map idToTaskTemplate, - ExperimentConfig experimentConfig) { + ExperimentConfig experimentConfig) + throws ClientException { Map taskTemplateIdToPermuteOrderCounter = new HashMap<>(); Map taskTemplateIdToParameterValuesCounter = new HashMap<>(); List phases = new ArrayList<>(); @@ -158,38 +176,50 @@ private static PhaseExec createPhaseExec( Map idToTaskTemplate, ExperimentConfig 
experimentConfig, Map taskTemplateIdToPermuteOrderCounter, - Map taskTemplateIdToParameterValuesCounter) { + Map taskTemplateIdToParameterValuesCounter) + throws ClientException { List sessions = new ArrayList<>(); for (int i = 0; i < phase.getSessions().size(); i++) { Session session = phase.getSessions().get(i); - String sessionId = String.valueOf(i); - SessionExec sessionExec = - createSessionExec( - sessionId, - session, - idToTaskTemplate, - experimentConfig, - taskTemplateIdToPermuteOrderCounter, - taskTemplateIdToParameterValuesCounter); - sessions.add(sessionExec); + for (int j = 1; j <= session.getNumInstances(); j++) { + SessionExec sessionExec = + createSessionExec( + createSessionId(phase.getId(), i, j), + session, + idToTaskTemplate, + experimentConfig, + taskTemplateIdToPermuteOrderCounter, + taskTemplateIdToParameterValuesCounter); + sessions.add(sessionExec); + } } return ImmutablePhaseExec.of(phase.getId(), sessions); } + private static String createSessionId(String phaseId, int number, int numInstances) { + final String SESSION_PREFIX = "session"; + String sessionId = + phaseId + DEFAULT_ID_SEPARATOR + SESSION_PREFIX + DEFAULT_ID_CONNECTOR + number; + if (numInstances > 1) { + sessionId += DEFAULT_ID_CONNECTOR + numInstances; + } + return sessionId; + } + private static SessionExec createSessionExec( String sessionId, Session session, Map idToTaskTemplate, ExperimentConfig experimentConfig, Map taskTemplateIdToPermuteOrderCounter, - Map taskTemplateIdToParameterValuesCounter) { + Map taskTemplateIdToParameterValuesCounter) + throws ClientException { List tasks = new ArrayList<>(); - for (int j = 0; j < session.getTasks().size(); j++) { - Task task = session.getTasks().get(j); - String taskId = task.getTemplateId() + "_" + j; + for (int i = 0; i < session.getTasks().size(); i++) { + Task task = session.getTasks().get(i); TaskExec taskExec = createTaskExec( - taskId, + createTaskId(sessionId, task.getTemplateId(), i), task, idToTaskTemplate, 
experimentConfig, @@ -201,19 +231,25 @@ private static SessionExec createSessionExec( sessionId, tasks, ObjectUtils.defaultIfNull(session.getTargetEndpoint(), 0)); } + private static String createTaskId(String sessionId, String templateId, int number) { + return sessionId + DEFAULT_ID_SEPARATOR + templateId + DEFAULT_ID_CONNECTOR + number; + } + private static TaskExec createTaskExec( String taskId, Task task, Map idToTaskTemplate, ExperimentConfig experimentConfig, Map taskTemplateIdToPermuteOrderCounter, - Map taskTemplateIdToParameterValuesCounter) { + Map taskTemplateIdToParameterValuesCounter) + throws ClientException { TaskTemplate taskTemplate = idToTaskTemplate.get(task.getTemplateId()); if (taskTemplate == null) { throw new IllegalArgumentException("Unknown task template id: " + task.getTemplateId()); } List files = createFileExecList( + taskId, taskTemplate, task, experimentConfig, @@ -226,14 +262,21 @@ private static TaskExec createTaskExec( } private static List createFileExecList( + String taskId, TaskTemplate taskTemplate, Task task, ExperimentConfig experimentConfig, Map taskTemplateIdToPermuteOrderCounter, - Map taskTemplateIdToParameterValuesCounter) { + Map taskTemplateIdToParameterValuesCounter) + throws ClientException { List files = new ArrayList<>(); for (String file : taskTemplate.getFiles()) { - files.add(SQLParser.getStatements(file)); + final String fileId = createFileId(taskId, file); + files.add( + ImmutableFileExec.of( + fileId, + createFileName(file), + createStatementExecList(fileId, SQLParser.getStatements(file)))); } files = applyPermutationOrder(taskTemplate, task, taskTemplateIdToPermuteOrderCounter, files); files = applyReplaceRegex(task, files); @@ -243,18 +286,45 @@ private static List createFileExecList( return files; } + private static String createFileId(String taskId, String filePath) { + return taskId + DEFAULT_ID_SEPARATOR + filePath; + } + + private static String createFileName(String filePath) { + String[] fileNames = 
filePath.split(DEFAULT_FILE_SEPARATOR); + return fileNames[fileNames.length - 1]; + } + + private static List createStatementExecList( + String fileId, List statements) { + List statement_execs = new ArrayList<>(); + for (int i = 0; i < statements.size(); i++) { + statement_execs.add( + ImmutableStatementExec.of(createStatementId(fileId, i), statements.get(i))); + } + return statement_execs; + } + + private static String createStatementId(String fileId, int number) { + final String STATEMENT_PREFIX = "statement"; + return fileId + DEFAULT_ID_SEPARATOR + STATEMENT_PREFIX + DEFAULT_ID_CONNECTOR + number; + } + private static List applyPermutationOrder( TaskTemplate taskTemplate, Task task, Map taskTemplateIdToPermuteOrderCounter, - List files) { + List files) + throws ClientException { if (taskTemplate.getPermutationOrdersDirectory() == null) { // Create statements with certain order return files; } - Map idToFile = new HashMap<>(); + Map nameToFile = new HashMap<>(); + // The permutation order is identified by file name, i.e., the last part of the file path, as + // per current convention. 
for (FileExec file : files) { - idToFile.put(file.getId(), file); + nameToFile.put(file.getName(), file); } int counter; if (Boolean.TRUE.equals(task.isPermuteOrder())) { @@ -268,7 +338,15 @@ private static List applyPermutationOrder( FileParser.getPermutationOrder(taskTemplate.getPermutationOrdersDirectory(), counter); List sortedFiles = new ArrayList<>(); for (String fileId : permutationOrder) { - sortedFiles.add(idToFile.get(fileId)); + if (!nameToFile.containsKey(fileId)) { + throw new ClientException( + "Could not find file " + + fileId + + " in file list: " + + nameToFile.toString() + + "; permutation of order unsuccessful."); + } + sortedFiles.add(nameToFile.get(fileId)); } return sortedFiles; } diff --git a/src/main/java/com/microsoft/lst_bench/input/Session.java b/src/main/java/com/microsoft/lst_bench/input/Session.java index 59f1353d..52b2395c 100644 --- a/src/main/java/com/microsoft/lst_bench/input/Session.java +++ b/src/main/java/com/microsoft/lst_bench/input/Session.java @@ -34,4 +34,10 @@ public interface Session { @JsonProperty("target_endpoint") @Nullable Integer getTargetEndpoint(); + + @JsonProperty("num_instances") + @Value.Default + default int getNumInstances() { + return 1; + } } diff --git a/src/main/java/com/microsoft/lst_bench/input/Task.java b/src/main/java/com/microsoft/lst_bench/input/Task.java index 38be48b7..0f267e61 100644 --- a/src/main/java/com/microsoft/lst_bench/input/Task.java +++ b/src/main/java/com/microsoft/lst_bench/input/Task.java @@ -20,6 +20,7 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import java.util.List; +import java.util.Map; import javax.annotation.Nullable; import org.immutables.value.Value; @@ -43,15 +44,7 @@ public interface Task { @Nullable String getCustomTaskExecutor(); @JsonProperty("custom_task_executor_arguments") - @Nullable CustomTaskExecutorArguments getCustomTaskExecutorArguments(); - - @Value.Immutable - 
@JsonSerialize(as = ImmutableCustomTaskExecutorArguments.class) - @JsonDeserialize(as = ImmutableCustomTaskExecutorArguments.class) - interface CustomTaskExecutorArguments { - @JsonProperty("dependent_task_batch_size") - @Nullable Integer getDependentTaskBatchSize(); - } + @Nullable Map getCustomTaskExecutorArguments(); @JsonProperty("replace_regex") @Nullable List getReplaceRegex(); diff --git a/src/main/java/com/microsoft/lst_bench/sql/SQLParser.java b/src/main/java/com/microsoft/lst_bench/sql/SQLParser.java index 7383948c..d4c71bb1 100644 --- a/src/main/java/com/microsoft/lst_bench/sql/SQLParser.java +++ b/src/main/java/com/microsoft/lst_bench/sql/SQLParser.java @@ -15,10 +15,6 @@ */ package com.microsoft.lst_bench.sql; -import com.microsoft.lst_bench.exec.FileExec; -import com.microsoft.lst_bench.exec.ImmutableFileExec; -import com.microsoft.lst_bench.exec.ImmutableStatementExec; -import com.microsoft.lst_bench.exec.StatementExec; import java.io.BufferedReader; import java.io.File; import java.io.IOException; @@ -35,16 +31,15 @@ private SQLParser() { // Defeat instantiation } - public static FileExec getStatements(String filepath) { + public static List getStatements(String filepath) { return getStatements(new File(filepath)); } - public static FileExec getStatements(File file) { - final List statements = new ArrayList<>(); + public static List getStatements(File file) { + final List statements = new ArrayList<>(); try (BufferedReader br = new BufferedReader( new InputStreamReader(Files.newInputStream(file.toPath()), StandardCharsets.UTF_8))) { - int i = 0; for (; ; ) { String statement; try { @@ -55,12 +50,12 @@ public static FileExec getStatements(File file) { if (statement == null) { break; } - statements.add(ImmutableStatementExec.of(file.getName() + "_" + i++, statement)); + statements.add(statement); } } catch (IOException e) { throw new RuntimeException("Cannot read query in file: " + file, e); } - return ImmutableFileExec.of(file.getName(), statements); 
+ return statements; } private static String nextStatement(BufferedReader reader) throws IOException { diff --git a/src/main/java/com/microsoft/lst_bench/common/TaskExecutor.java b/src/main/java/com/microsoft/lst_bench/task/TaskExecutor.java similarity index 99% rename from src/main/java/com/microsoft/lst_bench/common/TaskExecutor.java rename to src/main/java/com/microsoft/lst_bench/task/TaskExecutor.java index bd417bf9..9ec72d0c 100644 --- a/src/main/java/com/microsoft/lst_bench/common/TaskExecutor.java +++ b/src/main/java/com/microsoft/lst_bench/task/TaskExecutor.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.microsoft.lst_bench.common; +package com.microsoft.lst_bench.task; import com.microsoft.lst_bench.client.ClientException; import com.microsoft.lst_bench.client.Connection; diff --git a/src/main/java/com/microsoft/lst_bench/task/custom/ConcurrentPerfStresstestTaskExecutor.java b/src/main/java/com/microsoft/lst_bench/task/custom/ConcurrentPerfStresstestTaskExecutor.java new file mode 100644 index 00000000..354dfcff --- /dev/null +++ b/src/main/java/com/microsoft/lst_bench/task/custom/ConcurrentPerfStresstestTaskExecutor.java @@ -0,0 +1,192 @@ +/* + * Copyright (c) Microsoft Corporation. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.microsoft.lst_bench.task.custom; + +import com.microsoft.lst_bench.client.ClientException; +import com.microsoft.lst_bench.client.Connection; +import com.microsoft.lst_bench.exec.FileExec; +import com.microsoft.lst_bench.exec.ImmutableStatementExec; +import com.microsoft.lst_bench.exec.StatementExec; +import com.microsoft.lst_bench.exec.TaskExec; +import com.microsoft.lst_bench.telemetry.EventInfo.Status; +import com.microsoft.lst_bench.telemetry.SQLTelemetryRegistry; +import com.microsoft.lst_bench.util.StringUtils; +import java.time.Instant; +import java.util.Map; +import java.util.regex.Pattern; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Custom task executor implementation that allows users to execute concurrent tasks for specific + * performance stress testing. This type of testing focuses on queries that a) get enhanced with + * additional joins (number specified by the user) and b) get augmented with query padding (empty + * characters) at the end of the query, if specified by the user. This augmentation requires a + * specific query of the form "SELECT ... FROM ..." without additional clauses such as "WHERE" or + * "ORDER" to allow for join extensions. The properties of this class are defined via the + * 'custom_task_executor_arguments' property that is part of the workload configuration. Valid + * parameter names are 'concurrent_task_num_joins' and 'concurrent_task_min_query_length', their + * defaults are set to '0'. The user may further choose to specify parameters + * 'concurrent_id_separator' and 'concurrent_id_connector' which are used to build the id of the + * newly generated queries. The default for these parameters is set to ';' resp. '-'. 
+ */ +public class ConcurrentPerfStresstestTaskExecutor extends CustomTaskExecutor { + + private static final Logger LOGGER = + LoggerFactory.getLogger(ConcurrentPerfStresstestTaskExecutor.class); + + private final int DEFAULT_CONCURRENT_TASK_NUM_JOINS = 0; + private final int DEFAULT_CONCURRENT_TASK_MIN_QUERY_LENGTH = 0; + private final String CONCURRENT_TASK_NUM_JOINS = "concurrent_task_num_joins"; + private final String CONCURRENT_TASK_MIN_QUERY_LENGTH = "concurrent_task_min_query_length"; + private final String DEFAULT_CONCURRENT_ID_SEPARATOR = ";"; + private final String DEFAULT_CONCURRENT_ID_CONNECTOR = "-"; + private final String CONCURRENT_ID_SEPARATOR = "concurrent_id_separator"; + private final String CONCURRENT_ID_CONNECTOR = "concurrent_id_connector"; + + private final Pattern WHERE_PATTERN = Pattern.compile("WHERE|where|Where"); + private final Pattern ORDER_PATTERN = Pattern.compile("ORDER|order|Order"); + private final Pattern FROM_PATTERN = Pattern.compile("FROM|from|From"); + + private final String QUERY_END_TOKEN = ";"; + + public ConcurrentPerfStresstestTaskExecutor( + SQLTelemetryRegistry telemetryRegistry, + String experimentStartTime, + Map arguments) { + super(telemetryRegistry, experimentStartTime, arguments); + } + + @Override + public void executeTask(Connection connection, TaskExec task, Map values) + throws ClientException { + // Set default values. 
+ int numJoins; + if (this.getArguments() == null || this.getArguments().get(CONCURRENT_TASK_NUM_JOINS) == null) { + numJoins = DEFAULT_CONCURRENT_TASK_NUM_JOINS; + } else { + numJoins = Integer.valueOf(this.getArguments().get(CONCURRENT_TASK_NUM_JOINS)); + } + int minQueryLength; + if (this.getArguments() == null + || this.getArguments().get(CONCURRENT_TASK_MIN_QUERY_LENGTH) == null) { + minQueryLength = DEFAULT_CONCURRENT_TASK_MIN_QUERY_LENGTH; + } else { + minQueryLength = Integer.valueOf(this.getArguments().get(CONCURRENT_TASK_MIN_QUERY_LENGTH)); + } + + for (FileExec file : task.getFiles()) { + Instant fileStartTime = Instant.now(); + + if (file.getStatements().size() != 1) { + writeFileEvent(fileStartTime, file.getId(), Status.FAILURE); + throw new ClientException("Concurrent task execution requires one statement per file."); + } + StatementExec statement = file.getStatements().get(0); + + if (WHERE_PATTERN.matcher(statement.getStatement()).find() + || ORDER_PATTERN.matcher(statement.getStatement()).find()) { + writeStatementEvent( + fileStartTime, + file.getId(), + Status.FAILURE, + /* payload= */ "Query contains invalid key words (WHERE, ORDER, etc.): " + + statement.getStatement()); + throw new ClientException( + "Query contains invalid key words (WHERE, ORDER, etc.): " + statement.getStatement()); + } else if (!FROM_PATTERN.matcher(statement.getStatement()).find()) { + writeStatementEvent( + fileStartTime, + file.getId(), + Status.FAILURE, + /* payload= */ "Query does not contain keyword 'FROM': " + statement.getStatement()); + throw new ClientException( + "Query does not contain keyword 'FROM': " + statement.getStatement()); + } + + try { + String query = statement.getStatement().split(QUERY_END_TOKEN)[0]; + String join_clause = query.split(FROM_PATTERN.pattern())[1].trim(); + // Adjust number of joins. + for (int i = 0; i < numJoins; i++) { + query += ", " + join_clause + i; + } + // Adjust query padding. 
+ int queryPadding = minQueryLength - query.length(); + if (queryPadding > 0) { + query += new String(new char[queryPadding]).replace('\0', ' '); + } + query += QUERY_END_TOKEN; + + StatementExec mod_statement = + ImmutableStatementExec.of( + statement.getId() + + getIdSeparator() + + "numJoins" + + numJoins + + getIdConnector() + + "minQueryLength" + + minQueryLength, + query); + executeStatement(connection, values, mod_statement); + } catch (ClientException e) { + LOGGER.error("Exception executing file: " + file.getId()); + writeFileEvent(fileStartTime, file.getId(), Status.FAILURE); + throw e; + } + writeFileEvent(fileStartTime, file.getId(), Status.SUCCESS); + } + } + + private void executeStatement( + Connection connection, Map values, StatementExec statement) + throws ClientException { + Instant statementStartTime = Instant.now(); + try { + connection.execute(StringUtils.replaceParameters(statement, values).getStatement()); + } catch (Exception e) { + String error_msg = + "Exception executing statement: " + + statement.getId() + + "; " + + ExceptionUtils.getStackTrace(e); + writeStatementEvent(statementStartTime, statement.getId(), Status.FAILURE, error_msg); + throw new ClientException(error_msg); + } + writeStatementEvent(statementStartTime, statement.getId(), Status.SUCCESS, /* payload= */ null); + } + + private String getIdSeparator() { + String idSeparator; + if (this.getArguments() == null || this.getArguments().get(CONCURRENT_ID_SEPARATOR) == null) { + idSeparator = DEFAULT_CONCURRENT_ID_SEPARATOR; + } else { + idSeparator = this.getArguments().get(CONCURRENT_ID_SEPARATOR); + } + return idSeparator; + } + + private String getIdConnector() { + String idConnector; + if (this.getArguments() == null || this.getArguments().get(CONCURRENT_ID_CONNECTOR) == null) { + idConnector = DEFAULT_CONCURRENT_ID_CONNECTOR; + } else { + idConnector = this.getArguments().get(CONCURRENT_ID_CONNECTOR); + } + return idConnector; + } +} diff --git 
a/src/main/java/com/microsoft/lst_bench/task/custom/CustomTaskExecutor.java b/src/main/java/com/microsoft/lst_bench/task/custom/CustomTaskExecutor.java new file mode 100644 index 00000000..cbe222b4 --- /dev/null +++ b/src/main/java/com/microsoft/lst_bench/task/custom/CustomTaskExecutor.java @@ -0,0 +1,54 @@ +/* + * Copyright (c) Microsoft Corporation. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.microsoft.lst_bench.task.custom; + +import com.microsoft.lst_bench.client.ClientException; +import com.microsoft.lst_bench.client.Connection; +import com.microsoft.lst_bench.exec.TaskExec; +import com.microsoft.lst_bench.task.TaskExecutor; +import com.microsoft.lst_bench.telemetry.SQLTelemetryRegistry; +import java.util.Map; + +/** + * Base class for custom task executors. It extends the default task executor with a map of + * string arguments that is populated from the 'custom_task_executor_arguments' property of the + * workload configuration. Subclasses can access the arguments via getArguments() and override + * executeTask to implement custom execution logic; the default implementation here simply + * delegates to the parent task executor. Note that the arguments map may be null when no + * arguments are specified in the configuration. 
+ */ +public class CustomTaskExecutor extends TaskExecutor { + + protected final Map arguments; + + public CustomTaskExecutor( + SQLTelemetryRegistry telemetryRegistry, + String experimentStartTime, + Map arguments) { + super(telemetryRegistry, experimentStartTime); + this.arguments = arguments; + } + + @Override + public void executeTask(Connection connection, TaskExec task, Map values) + throws ClientException { + super.executeTask(connection, task, values); + } + + protected Map getArguments() { + return this.arguments; + } +} diff --git a/src/main/java/com/microsoft/lst_bench/common/DependentTaskExecutor.java b/src/main/java/com/microsoft/lst_bench/task/custom/DependentTaskExecutor.java similarity index 79% rename from src/main/java/com/microsoft/lst_bench/common/DependentTaskExecutor.java rename to src/main/java/com/microsoft/lst_bench/task/custom/DependentTaskExecutor.java index b1566554..bb5b243f 100644 --- a/src/main/java/com/microsoft/lst_bench/common/DependentTaskExecutor.java +++ b/src/main/java/com/microsoft/lst_bench/task/custom/DependentTaskExecutor.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.microsoft.lst_bench.common; +package com.microsoft.lst_bench.task.custom; import com.microsoft.lst_bench.client.ClientException; import com.microsoft.lst_bench.client.Connection; @@ -21,7 +21,6 @@ import com.microsoft.lst_bench.exec.FileExec; import com.microsoft.lst_bench.exec.StatementExec; import com.microsoft.lst_bench.exec.TaskExec; -import com.microsoft.lst_bench.input.Task.CustomTaskExecutorArguments; import com.microsoft.lst_bench.telemetry.EventInfo.Status; import com.microsoft.lst_bench.telemetry.SQLTelemetryRegistry; import com.microsoft.lst_bench.util.StringUtils; @@ -35,34 +34,33 @@ * Custom task executor implementation that allows users to execute dependent tasks. 
We call a * dependent task a task that iteratively executes a) a statement that is expected to return a * result; and b) a statement repeatedly that is expected to use that result. The result of the - * first statement is stored in an intermediate object that can be specific to the connection. The - * expected object for a JDBC connection is of type List>, handling of other - * objects would need to be added to the if-clause that checks the instance of the object. + * first statement is stored in a QueryResult object which is then used and interpreted by the + * second statement. For this task executor, we allow the second statement to be executed in + * batches. The batch size can be set via the 'custom_task_executor_arguments' property that is part + * of the workload configuration. The parameter name is 'dependent_task_batch_size'. */ -public class DependentTaskExecutor extends TaskExecutor { +public class DependentTaskExecutor extends CustomTaskExecutor { private static final Logger LOGGER = LoggerFactory.getLogger(DependentTaskExecutor.class); - private final CustomTaskExecutorArguments arguments; - private final int DEFAULT_BATCH_SIZE = 1; + private final String DEPENDENT_TASK_BATCH_SIZE = "dependent_task_batch_size"; public DependentTaskExecutor( SQLTelemetryRegistry telemetryRegistry, String experimentStartTime, - CustomTaskExecutorArguments arguments) { - super(telemetryRegistry, experimentStartTime); - this.arguments = arguments; + Map arguments) { + super(telemetryRegistry, experimentStartTime, arguments); } @Override public void executeTask(Connection connection, TaskExec task, Map values) throws ClientException { - int batch_size; - if (this.arguments == null || this.arguments.getDependentTaskBatchSize() == null) { - batch_size = DEFAULT_BATCH_SIZE; + int batchSize; + if (this.getArguments() == null || this.getArguments().get(DEPENDENT_TASK_BATCH_SIZE) == null) { + batchSize = DEFAULT_BATCH_SIZE; } else { - batch_size = 
this.arguments.getDependentTaskBatchSize().intValue(); + batchSize = Integer.valueOf(this.getArguments().get(DEPENDENT_TASK_BATCH_SIZE)); } QueryResult queryResult = null; @@ -92,8 +90,8 @@ public void executeTask(Connection connection, TaskExec task, Map size ? size : (j + batch_size); + for (int j = 0; j < size; j += batchSize) { + int localMax = (j + batchSize) > size ? size : (j + batchSize); Map localValues = new HashMap<>(values); localValues.putAll(queryResult.getStringMappings(j, localMax)); diff --git a/src/main/java/com/microsoft/lst_bench/telemetry/SQLTelemetryRegistry.java b/src/main/java/com/microsoft/lst_bench/telemetry/SQLTelemetryRegistry.java index b1b9a81e..84d1a939 100644 --- a/src/main/java/com/microsoft/lst_bench/telemetry/SQLTelemetryRegistry.java +++ b/src/main/java/com/microsoft/lst_bench/telemetry/SQLTelemetryRegistry.java @@ -18,7 +18,6 @@ import com.microsoft.lst_bench.client.ClientException; import com.microsoft.lst_bench.client.Connection; import com.microsoft.lst_bench.client.ConnectionManager; -import com.microsoft.lst_bench.exec.StatementExec; import com.microsoft.lst_bench.sql.SQLParser; import com.microsoft.lst_bench.util.StringUtils; import java.util.ArrayList; @@ -37,7 +36,7 @@ public class SQLTelemetryRegistry { private final ConnectionManager connectionManager; - private final List insertFileStatements; + private final List insertFileStatements; // TODO: Make writing events thread-safe. private List eventsStream; @@ -52,9 +51,10 @@ public SQLTelemetryRegistry( this.connectionManager = connectionManager; this.eventsStream = Collections.synchronizedList(new ArrayList<>()); this.insertFileStatements = - SQLParser.getStatements(insertFile).getStatements().stream() + SQLParser.getStatements(insertFile).stream() .map(s -> StringUtils.replaceParameters(s, parameterValues)) .collect(Collectors.toUnmodifiableList()); + // Create the tables if they don't exist. 
if (executeDdl) { executeDdl(ddlFile, parameterValues); @@ -65,9 +65,9 @@ private void executeDdl(String ddlFile, Map parameterValues) throws ClientException { LOGGER.info("Creating new logging tables..."); try (Connection connection = connectionManager.createConnection()) { - List ddlFileStatements = SQLParser.getStatements(ddlFile).getStatements(); - for (StatementExec query : ddlFileStatements) { - String currentQuery = StringUtils.replaceParameters(query, parameterValues).getStatement(); + List ddlFileStatements = SQLParser.getStatements(ddlFile); + for (String query : ddlFileStatements) { + String currentQuery = StringUtils.replaceParameters(query, parameterValues); connection.execute(currentQuery); } } @@ -101,8 +101,8 @@ public void flush() throws EventException { StringUtils.quote(o.getStatus().toString()), StringUtils.quote(o.getPayload()))) .collect(Collectors.joining("),(", "(", ")"))); - for (StatementExec query : insertFileStatements) { - String currentQuery = StringUtils.replaceParameters(query, values).getStatement(); + for (String query : insertFileStatements) { + String currentQuery = StringUtils.replaceParameters(query, values); connection.execute(currentQuery); } diff --git a/src/main/java/com/microsoft/lst_bench/util/StringUtils.java b/src/main/java/com/microsoft/lst_bench/util/StringUtils.java index f1194f35..9a0650cd 100644 --- a/src/main/java/com/microsoft/lst_bench/util/StringUtils.java +++ b/src/main/java/com/microsoft/lst_bench/util/StringUtils.java @@ -50,6 +50,14 @@ public static String quote(String str) { return "'" + str + "'"; } + public static String replaceParameters(String statement, Map parameterValues) { + if (parameterValues == null || parameterValues.isEmpty()) { + // Nothing to do + return statement; + } + return StringUtils.format(statement, parameterValues); + } + public static StatementExec replaceParameters( StatementExec statement, Map parameterValues) { if (parameterValues == null || parameterValues.isEmpty()) { @@ 
-67,16 +75,18 @@ public static FileExec replaceParameters(FileExec file, Map para } return ImmutableFileExec.of( file.getId(), + file.getName(), file.getStatements().stream() .map(s -> replaceParameters(s, parameterValues)) .collect(Collectors.toList())); } - public static FileExec replaceRegex(FileExec f, String regex, String replacement) { + public static FileExec replaceRegex(FileExec file, String regex, String replacement) { Pattern pattern = Pattern.compile(regex); return ImmutableFileExec.of( - f.getId(), - f.getStatements().stream() + file.getId(), + file.getName(), + file.getStatements().stream() .map( s -> ImmutableStatementExec.of( diff --git a/src/main/resources/config/spark/tpcds/wp1_longevity_trickle_1k_batches.yaml b/src/main/resources/config/spark/tpcds/wp1_longevity_trickle_1k_batches.yaml index 9d13fb02..caeaee9e 100644 --- a/src/main/resources/config/spark/tpcds/wp1_longevity_trickle_1k_batches.yaml +++ b/src/main/resources/config/spark/tpcds/wp1_longevity_trickle_1k_batches.yaml @@ -36,11 +36,11 @@ phases: sessions: - tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - id: single_user_2 @@ -51,11 +51,11 @@ phases: sessions: - tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - template_id: data_maintenance_dependent - custom_task_executor: 
com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - id: single_user_3 @@ -66,11 +66,11 @@ sessions: - tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - id: single_user_4 @@ -81,11 +81,11 @@ sessions: - tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - id: single_user_5 @@ -96,11 +96,11 @@ sessions: - tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 1000 - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size:
1000 - id: single_user_6 diff --git a/src/main/resources/schemas/workload.json b/src/main/resources/schemas/workload.json index 7ad13283..1f84066b 100644 --- a/src/main/resources/schemas/workload.json +++ b/src/main/resources/schemas/workload.json @@ -63,14 +63,7 @@ "custom_task_executor_arguments": { "type": "object", "title": "Custom task executor arguments", - "description": "Any arguments passed to a custom executor need to be defined here.", - "properties": { - "dependent_task_batch_size": { - "type": "integer", - "title": "Batch size for DependentTaskExecutor", - "description": "Sets the batch size for a task executed; specific to the DependentTaskExecutor class" - } - } + "description": "Any arguments passed to a custom executor need to be defined here." }, "replace_regex": { "type": "array", @@ -99,6 +92,11 @@ "type": "integer", "title": "Target endpoint index (default: 0)", "description": "The positional index (starting from 0) of the connection manager within the connections configuration file" + }, + "num_instances": { + "type": "integer", + "title": "Number of instances for this session", + "description": "Determines how often a session will be instantiated (default: 1)" } } } diff --git a/src/test/java/com/microsoft/lst_bench/DriverSparkTest.java b/src/test/java/com/microsoft/lst_bench/DriverSparkTest.java index 261ee5ba..a4843c4a 100644 --- a/src/test/java/com/microsoft/lst_bench/DriverSparkTest.java +++ b/src/test/java/com/microsoft/lst_bench/DriverSparkTest.java @@ -181,6 +181,19 @@ public void testJDBCSessionIceberg() throws Exception { "src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-iceberg.yaml"); } + /* TODO: Replace eventually with SQL Server version, once CI pipeline has been established. 
*/ + @Test + @EnabledIfSystemProperty(named = "lst-bench.test.lst", matches = "delta") + @EnabledIfSystemProperty(named = "lst-bench.test.connection", matches = "jdbc") + public void testJDBCConcurrentPerfStresstest() throws Exception { + runDriver( + "src/test/resources/config/spark/jdbc_connection_config.yaml", + "src/test/resources/config/spark/experiment_config-delta.yaml", + "src/test/resources/config/spark/telemetry_config.yaml", + "src/test/resources/config/spark/conc_perf_stresstest_task_library.yaml", + "src/test/resources/config/spark/w_perf_stresstest.yaml"); + } + private void runDriver(String arg0, String arg1, String arg2, String arg3, String arg4) throws Exception { Driver.main(new String[] {"-c", arg0, "-e", arg1, "-t", arg2, "-l", arg3, "-w", arg4}); diff --git a/src/test/java/com/microsoft/lst_bench/input/ParserTest.java b/src/test/java/com/microsoft/lst_bench/input/ParserTest.java index f30b37e3..2ce17df1 100644 --- a/src/test/java/com/microsoft/lst_bench/input/ParserTest.java +++ b/src/test/java/com/microsoft/lst_bench/input/ParserTest.java @@ -496,6 +496,7 @@ public void testParseWP3RWConcurrencyMulti() throws IOException { List tasksDM = sessions.get(1).getTasks(); Assertions.assertEquals(2, tasksDM.size()); Assertions.assertEquals(1, sessions.get(1).getTargetEndpoint()); + Assertions.assertEquals(1, sessions.get(1).getNumInstances()); } break; case "single_user_2_optimize_1": @@ -505,6 +506,7 @@ public void testParseWP3RWConcurrencyMulti() throws IOException { List tasksO = sessions.get(1).getTasks(); Assertions.assertEquals(1, tasksO.size()); Assertions.assertEquals(1, sessions.get(1).getTargetEndpoint()); + Assertions.assertEquals(1, sessions.get(1).getNumInstances()); } break; case "setup": diff --git a/src/test/resources/config/spark/conc_perf_stresstest_task_library.yaml b/src/test/resources/config/spark/conc_perf_stresstest_task_library.yaml new file mode 100644 index 00000000..64a9e6d0 --- /dev/null +++ 
b/src/test/resources/config/spark/conc_perf_stresstest_task_library.yaml @@ -0,0 +1,12 @@ +# Description: Tasks Library +--- +version: 1 +task_templates: +# Create external tables needed for benchmark +- id: setup + files: + - src/main/resources/scripts/tpcds/setup/spark/ddl-external-tables.sql +# Example task template for the base query of the custom concurrent performance stresstest. +- id: base_query + files: + - src/test/resources/scripts/spark/perf_stresstest.sql diff --git a/src/test/resources/config/spark/w_all_tpcds-delta.yaml b/src/test/resources/config/spark/w_all_tpcds-delta.yaml index d057efea..4a0d35bb 100644 --- a/src/test/resources/config/spark/w_all_tpcds-delta.yaml +++ b/src/test/resources/config/spark/w_all_tpcds-delta.yaml @@ -31,7 +31,7 @@ phases: sessions: - tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 100 - id: optimize diff --git a/src/test/resources/config/spark/w_all_tpcds-hudi.yaml b/src/test/resources/config/spark/w_all_tpcds-hudi.yaml index 2e9307ac..cdad28d8 100644 --- a/src/test/resources/config/spark/w_all_tpcds-hudi.yaml +++ b/src/test/resources/config/spark/w_all_tpcds-hudi.yaml @@ -34,7 +34,7 @@ phases: sessions: - tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 100 - id: optimize diff --git a/src/test/resources/config/spark/w_all_tpcds-iceberg.yaml b/src/test/resources/config/spark/w_all_tpcds-iceberg.yaml index 6025973c..5b0ed8c0 100644 --- a/src/test/resources/config/spark/w_all_tpcds-iceberg.yaml +++ b/src/test/resources/config/spark/w_all_tpcds-iceberg.yaml @@ -31,7 +31,7 @@ phases: sessions: - 
tasks: - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor custom_task_executor_arguments: dependent_task_batch_size: 100 - id: optimize diff --git a/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-delta.yaml b/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-delta.yaml index fc580ee1..ef14c26d 100644 --- a/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-delta.yaml +++ b/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-delta.yaml @@ -13,5 +13,5 @@ phases: - template_id: single_user - template_id: data_maintenance_delta - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor - template_id: optimize_delta diff --git a/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-hudi.yaml b/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-hudi.yaml index f7a51282..6715e10e 100644 --- a/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-hudi.yaml +++ b/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-hudi.yaml @@ -16,5 +16,5 @@ phases: - template_id: single_user - template_id: data_maintenance_hudi - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor - template_id: optimize_hudi diff --git a/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-iceberg.yaml b/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-iceberg.yaml index 09c5e210..b304adf0 100644 --- a/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-iceberg.yaml +++ 
b/src/test/resources/config/spark/w_all_tpcds_single_session_jdbc-iceberg.yaml @@ -13,5 +13,5 @@ phases: - template_id: single_user - template_id: data_maintenance_iceberg - template_id: data_maintenance_dependent - custom_task_executor: com.microsoft.lst_bench.common.DependentTaskExecutor + custom_task_executor: com.microsoft.lst_bench.task.custom.DependentTaskExecutor - template_id: optimize_iceberg diff --git a/src/test/resources/config/spark/w_perf_stresstest.yaml b/src/test/resources/config/spark/w_perf_stresstest.yaml new file mode 100644 index 00000000..ae8098ec --- /dev/null +++ b/src/test/resources/config/spark/w_perf_stresstest.yaml @@ -0,0 +1,24 @@ +# Description: Example for custom concurrent performance stresstest workload +--- +version: 1 +id: perf-stresstest +phases: +- id: setup + sessions: + - tasks: + - template_id: setup +- id: query_execution_1 + sessions: + - tasks: + - template_id: base_query + custom_task_executor: com.microsoft.lst_bench.task.custom.ConcurrentPerfStresstestTaskExecutor + custom_task_executor_arguments: + concurrent_task_num_joins: 5 + num_instances: 5 +- id: query_execution_2 + sessions: + - tasks: + - template_id: base_query + custom_task_executor: com.microsoft.lst_bench.task.custom.ConcurrentPerfStresstestTaskExecutor + custom_task_executor_arguments: + concurrent_task_min_query_length: 1000 \ No newline at end of file diff --git a/src/test/resources/config/sqlserver/experiment_config.yaml b/src/test/resources/config/sqlserver/experiment_config.yaml new file mode 100644 index 00000000..1be530a7 --- /dev/null +++ b/src/test/resources/config/sqlserver/experiment_config.yaml @@ -0,0 +1,7 @@ +# Description: Experiment Configuration +--- +version: 1 +id: perf-stresstest +repetitions: 1 +metadata: + system: sqlserver diff --git a/src/test/resources/config/sqlserver/jdbc_connection_config.yaml b/src/test/resources/config/sqlserver/jdbc_connection_config.yaml new file mode 100644 index 00000000..6913bd39 --- /dev/null +++ 
b/src/test/resources/config/sqlserver/jdbc_connection_config.yaml @@ -0,0 +1,9 @@ +# Description: Connections Configuration +--- +version: 1 +connections: +- id: sqlserver_0 + driver: com.microsoft.sqlserver.jdbc.SQLServerDriver + url: jdbc:sqlserver://localhost:1433;encrypt=false;database=testdb; + username: $USER + password: $PASSWORD diff --git a/src/test/resources/config/sqlserver/task_library.yaml b/src/test/resources/config/sqlserver/task_library.yaml new file mode 100644 index 00000000..c968b22e --- /dev/null +++ b/src/test/resources/config/sqlserver/task_library.yaml @@ -0,0 +1,8 @@ +# Description: Tasks Library +--- +version: 1 +task_templates: +# Example task template for the base query of the custom concurrent performance stresstest. +- id: base_query + files: + - src/test/resources/scripts/sqlserver/perf_stresstest.sql diff --git a/src/test/resources/config/sqlserver/telemetry_config.yaml b/src/test/resources/config/sqlserver/telemetry_config.yaml new file mode 100644 index 00000000..4d2a2bd5 --- /dev/null +++ b/src/test/resources/config/sqlserver/telemetry_config.yaml @@ -0,0 +1,13 @@ +# Description: Telemetry Configuration +--- +version: 1 +connection: + id: duckdb_0 + driver: org.duckdb.DuckDBDriver + url: jdbc:duckdb:./telemetry +execute_ddl: true +ddl_file: 'src/main/resources/scripts/logging/duckdb/ddl.sql' +insert_file: 'src/main/resources/scripts/logging/duckdb/insert.sql' +# The following parameter values will be used to replace the variables in the logging statements. 
+parameter_values: + data_path: '' diff --git a/src/test/resources/config/sqlserver/w_perf_stresstest.yaml b/src/test/resources/config/sqlserver/w_perf_stresstest.yaml new file mode 100644 index 00000000..1926c884 --- /dev/null +++ b/src/test/resources/config/sqlserver/w_perf_stresstest.yaml @@ -0,0 +1,20 @@ +# Description: Example for custom concurrent performance stresstest workload +--- +version: 1 +id: perf-stresstest +phases: +- id: query_execution_1 + sessions: + - tasks: + - template_id: base_query + custom_task_executor: com.microsoft.lst_bench.task.custom.ConcurrentPerfStresstestTaskExecutor + custom_task_executor_arguments: + concurrent_task_num_joins: 5 + num_instances: 5 +- id: query_execution_2 + sessions: + - tasks: + - template_id: base_query + custom_task_executor: com.microsoft.lst_bench.task.custom.ConcurrentPerfStresstestTaskExecutor + custom_task_executor_arguments: + concurrent_task_min_query_length: 1000 \ No newline at end of file diff --git a/src/test/resources/scripts/spark/perf_stresstest.sql b/src/test/resources/scripts/spark/perf_stresstest.sql new file mode 100644 index 00000000..8e00b432 --- /dev/null +++ b/src/test/resources/scripts/spark/perf_stresstest.sql @@ -0,0 +1 @@ +SELECT s.* FROM ${catalog}.${database}.store s; \ No newline at end of file diff --git a/src/test/resources/scripts/sqlserver/perf_stresstest.sql b/src/test/resources/scripts/sqlserver/perf_stresstest.sql new file mode 100644 index 00000000..5481c25a --- /dev/null +++ b/src/test/resources/scripts/sqlserver/perf_stresstest.sql @@ -0,0 +1 @@ +SELECT t.* FROM test_table t; \ No newline at end of file