Added test module

This commit is contained in:
Ytbarek Hailu
2025-01-28 22:38:01 -08:00
parent 9e0bf8bb4f
commit 251544bd6f
35 changed files with 121008 additions and 40 deletions

View File

@@ -4,25 +4,24 @@
---
DLSync is a database change management that deploys database changes to our database.
Each object(view, table, udf ...) in our database will
have a corresponding SQL script file where every change to this object is tracked in this file only. DLSync keeps track of what changes have been deployed to database
DLSync is a database change management tool designed to streamline the development and deployment of snowflake changes.
By associating each database object(view, table, udf ...) with a corresponding SQL script file, DLSync tracks every modification, ensuring efficient and accurate updates.
Each script can also have a corresponding test script that can be used to write unit tests for the database object.
. DLSync keeps track of what changes have been deployed to database
by using hash. Hence DLSync is capable of identifying what scripts have changed in the current deployment.
Using this DLSync only deploys changed script to database objects.
DLSync also understands interdependency between different scripts, thus applies these changes
according to their dependency.
Based on how we define the changes to database objects, DLSync divides database object scripts to 2 types, State and migration scripts.
## Key Features
- It combines state based and migration based change management to manage database changes
- Each object will have it's corresponding unique Script file where we can define the change to the object
- It can detect change between previous deployment and current script state.
- It can reorder scripts based on their dependency before deploying to database.
- It supports parametrization of scripts where we can define variables that change between different database instances.
- It supports parameter config file where each parameter config file corresponds to an instance
- It supports rollback to previous deployment state.
- Rollback is very simple and intuitive. Only one needs to rollback git repository of the script and triggering rollback module.
- It supports verify module where each database object is checked with current script to check for deployment verification or tracking out of sync database changes.
- It supports create script where we can create script file for each database objects.
- Hybrid Change Management: It combines state based and migration based change management to manage database changes
- Unique Script per object: Each object will have its corresponding unique script file where we can define the change to the object
- Unit Testing: It supports unit testing where we can write test scripts for each database object.
- Change detection: It can detect change between previous deployment and current script state.
- Dependency resolution: It can reorder scripts based on their dependency before deploying to database.
- Parametrization: It supports parametrization of scripts where we can define variables that change between different database instances. Each instance is associated with parameter config file, where each parameter config lists the variables and their value for that instance.
- Rollback: It supports rollback to a previous deployment state. Rollback is simple and intuitive: one only needs to revert the git repository of the scripts and trigger the rollback module.
- Verification: It supports verify module where each database object is checked with current script to check for deployment verification or tracking out of sync database changes.
- Script creation: It supports a create-script module that generates a script file for each database object.
## Project structure
To use this tool first create your script root directory.
@@ -47,6 +46,15 @@ Inside this directory create a directory structure like:
│ │ │ │ ├── object_name_7.sql # The database object name(table name, view name, function name ...)
│ │ │ │ ├── object_name_8.sql # The database object name(table name, view name, function name ...)
├── /tests # SQL unit test scripts
│ ├── /database_name_1
│ │ ├── /schema_name_1
│ │ │ ├── /[object_type]_1
│ │ │ │ ├── object_name_1_test.sql # unit test file for object object_name_1
│ │ │ │ ├── object_name_2_test.sql # unit test file for object object_name_2
│ │ ├── /schema_name_2
│ │ │ ├── /[object_type]_1
│ │ │ │ ├── object_name_5_test.sql # unit test file for object object_name_5
│ │ │ │ ├── object_name_6_test.sql # unit test file for object object_name_6
├── config.yml # configuration file
├── parameter-[profile-1].properties # parameter property file
├── parameter-[profile-2].properties # parameter property file

View File

@@ -1,8 +1,4 @@
# DLSync Backlog
- [x] Rollback for migration
- [x] Verify module State Script
- [x] create script to capture config tables
- [x] Script hierarchy design
- [x] Verify module for migration Script
- [ ] Migration Script parsing using ANTLR
- [ ] Support for different DB
- [ ] use config file for connection properties
- [ ] create command line application
- [ ] use antlr4 for verify module

View File

@@ -21,6 +21,7 @@ dependencies {
implementation 'org.slf4j:slf4j-api:2.0.4'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.18.2'
implementation 'commons-cli:commons-cli:1.9.0'
implementation 'org.antlr:antlr4-runtime:4.13.2'
compileOnly 'org.projectlombok:lombok:1.18.24'
annotationProcessor 'org.projectlombok:lombok:1.18.24'
@@ -48,4 +49,4 @@ jar {
test {
useJUnitPlatform()
}
}

View File

@@ -0,0 +1,8 @@
CREATE OR REPLACE FUNCTION ${EXAMPLE_DB}.${MAIN_SCHEMA}.GET_RETURNED_ORDERS(USER VARCHAR)
RETURNS NUMERIC(10, 2)
LANGUAGE SQL
AS
$$
SELECT COUNT(*) FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_RETURNS
WHERE USER_ID = USER
$$

View File

@@ -0,0 +1,10 @@
---version: 0, author: DlSync
CREATE TABLE ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_RETURNS (
ID INT AUTOINCREMENT PRIMARY KEY,
USER_ID INT REFERENCES ${EXAMPLE_DB}.${MAIN_SCHEMA}.USERS(ID),
ORDER_ID INT REFERENCES ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS(ID),
REASON VARCHAR,
RETURN_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
---rollback: DROP TABLE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_RETURNS;
---verify: SELECT * FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_RETURNS LIMIT 1;

View File

@@ -0,0 +1,7 @@
with MOCK_DATA AS (
SELECT 100 AS P_PRICE, 10 AS P_DISCOUNT_RATE
),
EXPECTED_DATA AS (
SELECT 90 AS RETURN_VALUE
)
SELECT CALCULATE_ORDER_TOTAL(P_PRICE, P_DISCOUNT_RATE) AS RETURN_VALUE from MOCK_DATA;

View File

@@ -0,0 +1,10 @@
WITH MOCK_DATA AS (
SELECT 1 AS USER
),
ORDER_RETURNS AS (
SELECT * FROM VALUES(1, 1, 1, 'broken'), (2, 1, 2, 'not arrived'), (3, 2, 3, 'quality') AS T(ID, USER_ID, ORDER_ID, RETURNED_QUANTITY)
),
EXPECTED_DATA AS (
SELECT 2 AS RETURN_VALUE
)
SELECT GET_RETURNED_ORDERS(USER) AS RETURN_VALUE from MOCK_DATA;

View File

@@ -0,0 +1,7 @@
WITH PRODUCTS AS (
SELECT * FROM VALUES(1, 'Product 1', 5, 100), (2, 'Product 2', 20, 50), (3, 'Product 3', 30, 5) AS T(ID, PRODUCT_NAME, STOCK, PRICE)
),
EXPECTED_DATA AS (
SELECT * FROM VALUES(1, 'Product 1', 5, 'LOW STOCK'), (2, 'Product 2', 20, 'SUFFICIENT STOCK'), (3, 'Product 3', 30, 'SUFFICIENT STOCK') AS T(ID, PRODUCT_NAME, STOCK, STOCK_STATUS)
)
SELECT * FROM STOCK_SUMMARY;

View File

@@ -0,0 +1,13 @@
WITH USERS AS (
SELECT * FROM VALUES(1, 'Alice'), (2, 'Bob') AS T(ID, USER_NAME)
),
ORDERS AS (
SELECT * FROM VALUES(1, 1, 1, 10), (2, 1, 2, 20), (3, 2, 3, 30) AS T(ID, USER_ID, PRODUCT_ID, QUANTITY)
),
PRODUCTS AS (
SELECT * FROM VALUES(1, 'Product 1', 100), (2, 'Product 2', 50), (3, 'Product 3', 5) AS T(ID, PRODUCT_NAME, PRICE)
),
EXPECTED_DATA AS (
SELECT * FROM VALUES(1, 'Alice', 2, 2000), (2, 'Bob', 1, 150) AS T(USER_ID, USER_NAME, TOTAL_ORDERS, TOTAL_SPENT)
)
SELECT * FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.USER_ORDER_SUMMARY;

View File

@@ -1 +1 @@
releaseVersion=1.5.0
releaseVersion=2.0.0

View File

@@ -6,7 +6,7 @@
<groupId>com.snowflake</groupId>
<artifactId>dlsync</artifactId>
<version>1.0-SNAPSHOT</version>
<version>2.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>11</maven.compiler.source>
@@ -57,6 +57,11 @@
<artifactId>commons-cli</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
    <!-- Coordinates were swapped (groupId/artifactId); correct Maven coordinates
         match the Gradle declaration: org.antlr:antlr4-runtime:4.13.2 -->
    <groupId>org.antlr</groupId>
    <artifactId>antlr4-runtime</artifactId>
    <version>4.13.2</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -5,10 +5,12 @@ import com.snowflake.dlsync.doa.ScriptRepo;
import com.snowflake.dlsync.doa.ScriptSource;
import com.snowflake.dlsync.models.*;
import com.snowflake.dlsync.parser.ParameterInjector;
import com.snowflake.dlsync.parser.TestQueryGenerator;
import lombok.extern.slf4j.Slf4j;
import java.io.*;
import java.security.NoSuchAlgorithmException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
@@ -206,6 +208,35 @@ public class ChangeManager {
endSyncSuccess(ChangeType.CREATE_LINEAGE, (long)dependencyList.size());
}
/**
 * Runs the unit-test module: finds the test script for every deployable (non-migration,
 * non-excluded) state script, executes the generated test query, and logs pass/fail.
 *
 * @throws SQLException if the change-sync bookkeeping fails
 * @throws IOException  if a script or test file cannot be read
 */
public void test() throws SQLException, IOException {
    log.info("Started Test module.");
    startSync(ChangeType.TEST);
    // Only state objects (views, functions, ...) are testable; migrations are skipped.
    List<Script> scripts = scriptSource.getAllScripts().stream()
            .filter(script -> !config.isScriptExcluded(script))
            .filter(script -> !script.getObjectType().isMigration())
            .collect(Collectors.toList());
    // Resolve ${...} parameters first so the generated test queries are executable.
    scripts.forEach(script -> parameterInjector.injectParameters(script));
    List<TestScript> testScripts = scriptSource.getTestScripts(scripts);
    int size = testScripts.size();
    int index = 1;
    for (TestScript script : testScripts) {
        log.info("{} of {}: testing object: {}", index++, size, script);
        log.debug("Testing query: [{}]", script.getTestQuery());
        List<TestResult> testResults = scriptRepo.runTest(script);
        // A non-empty result means at least one assertion row failed (or the query errored).
        if (!testResults.isEmpty()) {
            log.info("Test query for script: {} is: \n{}", script, script.getTestQuery());
            log.error("Test failed for script: {} with error: [{}]", script, testResults);
        } else {
            log.info("Test passed for script: {}", script);
        }
    }
    // NOTE(review): the sync is marked successful even when individual tests fail —
    // confirm this is intended rather than failing the run.
    endSyncSuccess(ChangeType.TEST, (long) size);
}
public void startSync(ChangeType changeType) throws SQLException {
scriptRepo.insertChangeSync(changeType, Status.IN_PROGRESS, changeType.toString() + " started.");
}

View File

@@ -51,6 +51,10 @@ public class Main {
changeManager.createLineage();
log.info("DLsync successfully created lineage to DB.");
break;
case TEST:
changeManager.test();
log.info("DLsync successfully tested.");
break;
default:
log.error("Change type not specified as an argument.");
}

View File

@@ -9,6 +9,14 @@ public class ScriptFactory {
return new StateScript(databaseName, schemaName, objectName, objectType, content);
}
public static StateScript getStateScript(String scriptPath, String databaseName, String schemaName, ScriptObjectType objectType, String objectName, String content) {
return new StateScript(scriptPath, databaseName, schemaName, objectName, objectType, content);
}
public static MigrationScript getMigrationScript(String scriptPath, String databaseName, String schemaName, ScriptObjectType objectType, String objectName, String content, Long version, String author, String rollback, String verify) {
return new MigrationScript(scriptPath, databaseName, schemaName, objectName, objectType, content, version, author, rollback, verify);
}
public static MigrationScript getMigrationScript(String databaseName, String schemaName, ScriptObjectType objectType, String objectName, String content, Long version, String author, String rollback, String verify) {
return new MigrationScript(databaseName, schemaName, objectName, objectType, content, version, author, rollback, verify);
}
@@ -45,4 +53,8 @@ public class ScriptFactory {
return new MigrationScript(databaseName, schemaName, objectName, objectType, migration.getContent(), migration.getVersion(), migration.getAuthor(), migration.getRollback(), migration.getVerify());
}
public static TestScript getTestScript(String scriptPath, String databaseName, String schemaName, ScriptObjectType objectType, String objectName, String content, Script script) {
return new TestScript(scriptPath, databaseName, schemaName, objectName, objectType, content, script);
}
}

View File

@@ -7,6 +7,7 @@ import com.snowflake.dlsync.models.*;
import com.snowflake.dlsync.parser.SqlTokenizer;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.sql.*;
import java.util.*;
import java.util.stream.Collectors;
@@ -426,7 +427,7 @@ public class ScriptRepo {
return connectionProperties.getProperty("schema");
}
private ResultSet executeQuery(String query) throws SQLException {
public ResultSet executeQuery(String query) throws SQLException {
return connection.createStatement().executeQuery(query);
}
@@ -563,5 +564,22 @@ public class ScriptRepo {
public boolean compareScript(Script script1, Script script2) {
return SqlTokenizer.compareScripts(script1, script2);
}
/**
 * Executes the generated test query for the given test script and collects assertion rows.
 * An empty list means the test passed; each returned row is a failing assertion.
 * SQL errors are not propagated — they are converted into an ERROR TestResult.
 *
 * @param testScript the test script whose generated query is executed
 * @return failing assertion rows (empty when the test passes)
 * @throws IOException if the test query cannot be generated
 */
public List<TestResult> runTest(TestScript testScript) throws IOException {
    List<TestResult> testResults = new ArrayList<>();
    log.debug("Running test script: {}", testScript.getObjectName());
    // try-with-resources closes both Statement and ResultSet (previously leaked).
    try (Statement statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery(testScript.getTestQuery())) {
        while (resultSet.next()) {
            // Column 1: failure count, column 2: failure message (see TestQueryGenerator's assertion).
            testResults.add(new TestResult(resultSet.getString(1), resultSet.getString(2)));
        }
        return testResults;
    } catch (SQLException e) {
        log.error("Error while running test script: {}", e.getMessage());
        testResults.add(new TestResult(e));
        return testResults;
    }
}
}

View File

@@ -25,7 +25,7 @@ public class ScriptSource {
public ScriptSource(String scriptRoot) {
this.scriptRoot = scriptRoot;
mainScriptDir = Files.exists(Path.of(scriptRoot, "main")) ? Path.of(scriptRoot, "main").toString(): scriptRoot;
testScriptDir = Path.of(scriptRoot, "tests").toString();
testScriptDir = Path.of(scriptRoot, "test").toString();
log.debug("Script file reader initialized with scriptRoot: {}", scriptRoot);
}
@@ -72,6 +72,20 @@ public class ScriptSource {
return allScripts;
}
/**
 * Looks up the corresponding test script for each deployable script.
 * Scripts without a test file are skipped.
 *
 * @param scripts main scripts to find tests for
 * @return test scripts for every script that has a test file
 * @throws IOException if a test file exists but cannot be read
 */
public List<TestScript> getTestScripts(List<Script> scripts) throws IOException {
    List<TestScript> testScripts = new ArrayList<>();
    // Plain loop instead of a stream so the declared IOException propagates
    // directly rather than being wrapped in a RuntimeException.
    for (Script script : scripts) {
        TestScript testScript = getTestScript(script);
        if (testScript != null) {
            testScripts.add(testScript);
        }
    }
    return testScripts;
}
public List<Script> getScriptsInSchema(String database, String schema) throws IOException {
log.info("Reading script files from schema: {}", schema);
List<Script> scripts = new ArrayList<>();
@@ -123,7 +137,7 @@ public class ScriptSource {
}
}
else {
Script script = ScriptFactory.getStateScript(database, schema, objectType, objectName, content);
Script script = ScriptFactory.getStateScript(file.getPath(), database, schema, objectType, objectName, content);
// Script script = new Script(database, schema, objectType, objectName, content);
scripts.add(script);
}
@@ -131,14 +145,19 @@ public class ScriptSource {
}
public Script getScriptByName(String database, String schema, ScriptObjectType type, String objectName) throws IOException {
File file = Path.of(mainScriptDir, database, schema, type.toString(), objectName + ".SQL").toFile();
public TestScript getTestScript(Script script) throws IOException {
String objectName = script.getObjectName() + "_TEST";
String testScriptPath = script.getScriptPath().replace(".SQL", "_TEST.SQL");
testScriptPath = testScriptPath.replaceAll("^" + mainScriptDir, testScriptDir);
File file = Path.of(testScriptPath).toFile();
if(file.exists()) {
log.info("Test script file found: {}", file.getPath());
String content = Files.readString(file.toPath());
TestScript testScript = ScriptFactory.getTestScript(file.getPath(), script.getDatabaseName(), script.getSchemaName(), script.getObjectType(), objectName, content, script);
return testScript;
}
return null;
String content = Files.readString(file.toPath());
Script script = ScriptFactory.getStateScript(database, schema, type, objectName, content);
// Script script = new Script(database, schema, type, objectName, content);
return script;
}
public void createScriptFiles(List<Script> scripts) {

View File

@@ -1,5 +1,5 @@
package com.snowflake.dlsync.models;
public enum ChangeType {
DEPLOY, VERIFY, ROLLBACK, CREATE_SCRIPT, CREATE_LINEAGE
DEPLOY, VERIFY, ROLLBACK, CREATE_SCRIPT, CREATE_LINEAGE, TEST
};

View File

@@ -7,14 +7,19 @@ public class MigrationScript extends Script {
private String rollback;
private String verify;
public MigrationScript(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content, Long version, String author, String rollback, String verify) {
super(databaseName, schemaName, objectName, objectType, content);
public MigrationScript(String scriptPath, String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content, Long version, String author, String rollback, String verify) {
super(scriptPath, databaseName, schemaName, objectName, objectType, content);
this.version = version;
this.author = author;
this.rollback = rollback;
this.verify = verify;
}
public MigrationScript(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content, Long version, String author, String rollback, String verify) {
this(null, databaseName, schemaName, objectName, objectType, content, version, author, rollback, verify);
}
@Override
public String getId() {
return String.format("%s:%s", getFullObjectName(), version);

View File

@@ -9,6 +9,7 @@ import java.util.Objects;
@Slf4j
public abstract class Script {
private String scriptPath;
private String databaseName;
private String schemaName;
private String objectName;
@@ -16,7 +17,8 @@ public abstract class Script {
private String content;
private String hash;
public Script(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content) {
public Script(String scriptPath, String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content) {
this.scriptPath = scriptPath;
this.databaseName = databaseName.toUpperCase();
this.schemaName = schemaName.toUpperCase();
this.objectName = objectName.toUpperCase();
@@ -25,6 +27,10 @@ public abstract class Script {
this.hash = hash = Util.getMd5Hash(this.content);
}
public String getScriptPath() {
return scriptPath;
}
public String getDatabaseName() {
return databaseName;
}

View File

@@ -3,9 +3,13 @@ package com.snowflake.dlsync.models;
public class StateScript extends Script {
public StateScript(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content) {
super(databaseName, schemaName, objectName, objectType, content);
public StateScript(String scriptPath, String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content) {
super(scriptPath, databaseName, schemaName, objectName, objectType, content);
}
public StateScript(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content) {
this(null, databaseName, schemaName, objectName, objectType, content);
}
@Override
public String getId() {

View File

@@ -0,0 +1,20 @@
package com.snowflake.dlsync.models;
import lombok.Data;
@Data
public class TestResult {
    // First column of an assertion row (failure count), or "ERROR" for SQL failures.
    private String result;
    // Human-readable description of the failure.
    private String message;

    public TestResult(String result, String message) {
        this.result = result;
        this.message = message;
    }

    // Wraps an execution error as a test result so callers handle both uniformly.
    public TestResult(Exception e) {
        this("ERROR", e.getMessage());
    }
}

View File

@@ -0,0 +1,29 @@
package com.snowflake.dlsync.models;
import com.snowflake.dlsync.parser.TestQueryGenerator;
import java.io.IOException;
/**
 * A unit-test script paired with the main script (view, function, ...) it tests.
 * The executable test query is produced by {@link TestQueryGenerator} from this
 * script's content combined with the main script's query.
 */
public class TestScript extends Script {
// The database object script this test exercises.
private Script mainScript;
private TestQueryGenerator testQueryGenerator;
public TestScript(String scriptPath, String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content, Script mainScript) {
super(scriptPath, databaseName, schemaName, objectName, objectType, content);
this.mainScript = mainScript;
this.testQueryGenerator = new TestQueryGenerator(this);
}
public Script getMainScript() {
return mainScript;
}
// Generates the full test query (parses both scripts on each call).
public String getTestQuery() throws IOException {
return testQueryGenerator.generateTestQuery();
}
@Override
public String getId() {
// Derived from the tested object's id so test and object stay associated.
return mainScript.getId() + "_TEST";
}
}

View File

@@ -0,0 +1,56 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.parser.antlr.SnowflakeLexer;
import com.snowflake.dlsync.parser.antlr.SnowflakeParser;
import lombok.extern.slf4j.Slf4j;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
@Slf4j
public class FunctionScriptParser extends ScriptParser {
    // True when the parsed function is declared LANGUAGE SQL; only those can be tested.
    private boolean isSql = false;

    public FunctionScriptParser(Script script) {
        super(script.getContent());
    }

    public boolean isSql() {
        return isSql;
    }

    /**
     * Captures the function name and rejects non-SQL functions (e.g. Java/Python UDFs),
     * whose bodies cannot be inlined into a test query.
     */
    @Override
    public void enterCreate_function(SnowflakeParser.Create_functionContext ctx) {
        if (ctx.SQL() != null) {
            isSql = true;
        } else {
            log.error("Error in parsing {}, Only sql function are supported in testing", content);
            throw new UnsupportedOperationException("Only SQL functions are supported");
        }
        objectName = ctx.object_name().getText();
    }

    /**
     * Extracts the function body and wraps it in a SELECT over MOCK_DATA so the
     * function's parameters are supplied as columns of the mock CTE.
     */
    @Override
    public void enterFunction_definition(SnowflakeParser.Function_definitionContext ctx) {
        mainQuery = content.substring(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex() + 1);
        if (ctx.getChild(0) instanceof SnowflakeParser.StringContext) {
            // Single-quoted body: unescape doubled quotes and strip the outer quotes.
            mainQuery = mainQuery.replace("''", "'");
            mainQuery = mainQuery.substring(1, mainQuery.length() - 1);
        } else {
            // Dollar-quoted body: strip the $$ delimiters.
            mainQuery = mainQuery.replace("$$", "");
        }
        // Bug fix: %S (uppercase conversion) would upper-case the entire function body,
        // corrupting case-sensitive string literals inside it; %s preserves the text.
        // NOTE(review): if case normalization of identifiers was the intent, normalize
        // only during reference matching instead — confirm with the author.
        mainQuery = String.format("SELECT (%s) AS RETURN FROM MOCK_DATA", mainQuery);
    }

    /**
     * After the file is parsed, re-parse the wrapped query to collect the objects
     * the function body references (used later for mock substitution).
     */
    @Override
    public void exitSnowflake_file(SnowflakeParser.Snowflake_fileContext ctx) {
        ScriptParser scriptParser = new ScriptParser(mainQuery);
        scriptParser.parse();
        objectReferences.addAll(scriptParser.getObjectReferences());
    }
}

View File

@@ -0,0 +1,53 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.parser.antlr.SnowflakeLexer;
import com.snowflake.dlsync.parser.antlr.SnowflakeParser;
import com.snowflake.dlsync.parser.antlr.SnowflakeParserBaseListener;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import java.util.HashSet;
import java.util.Set;
/**
 * Base ANTLR listener that parses a Snowflake SQL script and records every
 * object name it references. Subclasses refine objectName and mainQuery for
 * specific object types (views, functions, test scripts).
 */
public class ScriptParser extends SnowflakeParserBaseListener {
    protected String content;
    protected String objectName;
    protected String mainQuery;
    protected Set<String> objectReferences = new HashSet<>();

    public ScriptParser(String content) {
        this.content = content;
    }

    /** Lexes and parses the script, then walks the parse tree with this listener. */
    public void parse() {
        SnowflakeLexer lexer = new SnowflakeLexer(CharStreams.fromString(content));
        SnowflakeParser sqlParser = new SnowflakeParser(new CommonTokenStream(lexer));
        new ParseTreeWalker().walk(this, sqlParser.snowflake_file());
    }

    /** Every object name seen anywhere in the script counts as a reference. */
    @Override
    public void enterObject_name(SnowflakeParser.Object_nameContext ctx) {
        objectReferences.add(ctx.getText());
    }

    public String getContent() {
        return content;
    }

    public String getObjectName() {
        return objectName;
    }

    public String getMainQuery() {
        return mainQuery;
    }

    public Set<String> getObjectReferences() {
        return objectReferences;
    }
}

View File

@@ -0,0 +1,94 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.TestScript;
import com.snowflake.dlsync.parser.antlr.*;
import lombok.extern.slf4j.Slf4j;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@Slf4j
public class TestQueryGenerator {
    private TestScript testScript;
    private TestScriptParser testScriptParser;
    private ScriptParser scriptParser;
    // Assertion template: any returned row means actual_data and expected_data differ.
    // Hoisted to a constant — it never varies per instance.
    private static final String ASSERTION = "select count(1) as result, 'rows missing from actual data' as message from (\n" +
            "\t\tselect * from actual_data\n" +
            "\t\texcept \n" +
            "\t\tselect * from expected_data\n" +
            "\t) having result > 0\n" +
            "\tunion \n" +
            "\tselect count(1) as result, 'rows missing from expected data' as message from ( \n" +
            "\t\tselect * from expected_data\n" +
            "\t\texcept\n" +
            "\t\tselect * from actual_data\n" +
            "\t) having result > 0";

    public TestQueryGenerator(TestScript testScript) {
        this.testScript = testScript;
    }

    /**
     * Builds the full test query:
     * WITH &lt;mock CTEs&gt;, expected_data, actual_data, assertion SELECT * FROM assertion;
     * The main script's query becomes actual_data with its object references swapped
     * for the mock CTEs declared in the test script.
     *
     * @return the complete executable test query
     * @throws IOException if either script cannot be parsed
     */
    public String generateTestQuery() throws IOException {
        parseQueryScript();
        parseMainScript();
        // LinkedHashMap keeps the mock CTEs in a deterministic order; a CTE may only
        // reference CTEs declared before it, so arbitrary HashMap order could emit
        // an invalid WITH clause.
        Map<String, String> cteMap = new java.util.LinkedHashMap<>(testScriptParser.getCteMap());
        String expectedData = cteMap.remove("EXPECTED_DATA");
        String actualData = scriptParser.getMainQuery();
        Set<String> objectReferences = scriptParser.getObjectReferences();
        actualData = updateQueryWithMockData(actualData, objectReferences, cteMap);
        log.debug("objectReferences for {} are: [{}]", testScript.getMainScript().getFullObjectName(), objectReferences);
        StringBuilder testQuery = new StringBuilder();
        testQuery.append("with ");
        for (String mock : cteMap.keySet()) {
            testQuery.append(mock).append(" as (\n\t").append(cteMap.get(mock)).append("\n),");
        }
        testQuery.append("\nexpected_data as (\n\t").append(expectedData).append("\n),");
        testQuery.append("\nactual_data as (\n\t").append(actualData).append("\n),");
        testQuery.append("\nassertion as (\n\t").append(ASSERTION).append("\n)");
        testQuery.append("\nselect * from assertion;");
        return testQuery.toString();
    }

    /**
     * Replaces object references in the query with mock CTE names, e.g. a reference
     * DB.SCHEMA.USERS is rewritten to the mock CTE USERS via an endsWith match.
     * NOTE(review): endsWith can over-match (e.g. ORDER_USERS also ends with USERS) —
     * confirm object names are distinct enough in practice.
     */
    public String updateQueryWithMockData(String query, Set<String> objectReferences, Map<String, String> mockData) {
        for (String mock : mockData.keySet()) {
            if (!objectReferences.contains(mock)) {
                for (String objectName : objectReferences) {
                    if (objectName.endsWith(mock)) {
                        query = query.replace(objectName, mock);
                    }
                }
            }
        }
        return query;
    }

    /** Parses the test script to collect its mock and EXPECTED_DATA CTEs. */
    public void parseQueryScript() throws IOException {
        testScriptParser = new TestScriptParser(testScript);
        testScriptParser.parse();
    }

    /** Parses the main script with a parser suited to its object type (views and SQL functions only). */
    public void parseMainScript() throws IOException {
        Script mainScript = testScript.getMainScript();
        switch (mainScript.getObjectType()) {
            case VIEWS:
                scriptParser = new ViewScriptParser(mainScript);
                break;
            case FUNCTIONS:
                scriptParser = new FunctionScriptParser(mainScript);
                break;
            default:
                log.error("Unsupported test for object type: {} of script: {}", mainScript.getObjectType(), mainScript);
                throw new UnsupportedOperationException("Unsupported test for object type: " + mainScript.getObjectType());
        }
        scriptParser.parse();
    }
}

View File

@@ -0,0 +1,35 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.models.TestScript;
import com.snowflake.dlsync.parser.antlr.SnowflakeParser;
import java.util.HashMap;
import java.util.Map;
/**
 * Parses a unit-test SQL script and collects its CTEs (mock data plus EXPECTED_DATA)
 * and the script's top-level query.
 */
public class TestScriptParser extends ScriptParser {
    // LinkedHashMap preserves declaration order: a CTE may only reference CTEs
    // declared before it, so the generated WITH clause must keep source order
    // (HashMap would emit them in arbitrary order).
    private Map<String, String> cteMap = new java.util.LinkedHashMap<>();

    public TestScriptParser(TestScript testScript) {
        super(testScript.getContent());
        objectName = testScript.getObjectName();
    }

    /** Records each CTE as upper-cased name -> the text of its parenthesized SELECT. */
    @Override
    public void enterCommon_table_expression(SnowflakeParser.Common_table_expressionContext ctx) {
        String cteName = ctx.getChild(0).getText();
        var selectCtx = ctx.select_statement_in_parentheses();
        String cteQuery = content.substring(selectCtx.getStart().getStartIndex(), selectCtx.getStop().getStopIndex() + 1);
        cteMap.put(cteName.toUpperCase(), cteQuery);
    }

    /** Captures the test script's main query (the SELECT directly under the statement, not a CTE body). */
    @Override
    public void enterSelect_statement_in_parentheses(SnowflakeParser.Select_statement_in_parenthesesContext ctx) {
        if (ctx.getParent() instanceof SnowflakeParser.Query_statementContext) {
            mainQuery = content.substring(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex() + 1);
        }
    }

    public Map<String, String> getCteMap() {
        return cteMap;
    }
}

View File

@@ -0,0 +1,30 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.parser.antlr.SnowflakeParser;
/**
 * Extracts a view's defining query and the objects that query references.
 */
public class ViewScriptParser extends ScriptParser {

    public ViewScriptParser(Script script) {
        super(script.getContent());
    }

    /** Remembers the view's own name when the CREATE VIEW statement is entered. */
    @Override
    public void enterCreate_view(SnowflakeParser.Create_viewContext ctx) {
        objectName = ctx.object_name().getText();
    }

    /** The name directly under CREATE VIEW is the view itself; any other name is a reference. */
    @Override
    public void enterObject_name(SnowflakeParser.Object_nameContext ctx) {
        boolean isViewOwnName = ctx.getParent() instanceof SnowflakeParser.Create_viewContext;
        if (isViewOwnName) {
            objectName = ctx.getText();
        } else {
            objectReferences.add(ctx.getText());
        }
    }

    /** The view body's query text becomes the main query used as actual_data in tests. */
    @Override
    public void enterQuery_statement(SnowflakeParser.Query_statementContext ctx) {
        mainQuery = content.substring(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex() + 1);
    }
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff