mirror of https://github.com/Dikootje/dlsync.git synced 2025-12-17 18:01:28 +00:00

Open source initial commit

This commit is contained in:
Ytbarek Hailu
2025-01-08 14:30:44 -08:00
commit 9e0bf8bb4f
72 changed files with 4540 additions and 0 deletions

33
.github/repo_meta.yaml vendored Normal file

@@ -0,0 +1,33 @@
# point_of_contact: the owner of this repository, can be a GitHub user or GitHub team
point_of_contact: sfc-gh-yhailu
# production: whether this repository meets the criteria for being "production", see https://snowflakecomputing.atlassian.net/wiki/spaces/CLO/pages/2239988967/Production+Repository+Criteria for criteria
production: true
# distributed: whether any source code in this repository is distributed directly to customers (e.g. driver and frontend software)
distributed: false
# modified: whether any open source dependencies in this repository have been modified
modified: false
# release_branches: list of release branch patterns, exact matches or regex is acceptable
release_branches:
- main
- release.*
# code_owners_file_present: whether there is a CODEOWNERS file in this repository
code_owners_file_present: true
# jira_project_issue_type: the jira issuetype used to raise issues related to this repository in the SNOW Jira project
jira_project_issue_type: Bug
# jira_area: the jira area that raised issues should use
jira_area: Orphaned
# audit_in_scope: whether this repository is included in scope for audits or certifications (SOX, SOC, ISO, Fedramp etc.)
audit_in_scope: false

12
.gitignore vendored Normal file

@@ -0,0 +1,12 @@
.idea/
.DS_Store
.gradle
build
out
*.iml
*.iws
log/
target/
.env
Jenkinsfile-app
out/

7
.pre-commit-config.yaml Normal file

@@ -0,0 +1,7 @@
repos:
  - repo: git@github.com:GitGuardian/ggshield.git
    rev: v1.28.0
    hooks:
      - id: ggshield
        language_version: python3
        stages: [commit]

0
CHANGELOG.md Normal file

15
Dockerfile Normal file

@@ -0,0 +1,15 @@
FROM adoptopenjdk:11-jre-hotspot
# Update and install dependencies
RUN apt-get update && \
apt-get -y install coreutils python3-venv jq
# Install AWS CLI
RUN python3 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
RUN pip install awscli
# DlSync app
RUN mkdir /opt/app
WORKDIR /opt/app
COPY build/libs/dlsync-*.jar dlsync.jar

3
LEGAL.md Normal file

@@ -0,0 +1,3 @@
#### This application is not part of the Snowflake Service and is governed by the terms in LICENSE, unless expressly agreed to in writing. You use this application at your own risk, and Snowflake has no obligation to support your use of this application.

201
LICENSE Normal file

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

294
README.md Normal file

@@ -0,0 +1,294 @@
# DLSync
<img src="https://github.com/user-attachments/assets/24da3d86-f58e-4b55-8d9e-b3194117a566" height="300" title="logo" alt="logo">
---
DLSync is a database change management tool that deploys database changes to your database.
Each object (view, table, UDF, ...) in the database has
a corresponding SQL script file, and every change to that object is tracked in this file only. DLSync keeps track of what has been deployed
by using hashes, so it can identify which scripts have changed in the current deployment
and deploys only the changed scripts to the database objects.
DLSync also understands the interdependencies between scripts, and applies changes
in dependency order.
Based on how changes to database objects are defined, DLSync divides database object scripts into two types: state scripts and migration scripts.
## Key Features
- It combines state-based and migration-based change management to manage database changes.
- Each object has its own unique script file where changes to the object are defined.
- It can detect changes between the previous deployment and the current script state.
- It can reorder scripts based on their dependencies before deploying to the database.
- It supports parametrization of scripts, so you can define variables that change between database instances.
- It supports parameter config files, where each parameter config file corresponds to an instance.
- It supports rollback to the previous deployment state.
- Rollback is simple and intuitive: one only needs to roll back the git repository of the scripts and trigger the rollback module.
- It supports a verify module, where each database object is checked against its current script to verify a deployment or to track out-of-sync database changes.
- It supports a create script module that can generate a script file for each database object.
## Project structure
To use this tool, first create your script root directory.
This directory will contain all scripts and configurations.
Inside this directory, create a directory structure like:
```
/script-root # Root directory for the scripts
├── /main # Main scripts for deployment
│ ├── /database_name_1 # Database name
│ │ ├── /schema_name_1 # database Schema name
│ │ │ ├── /[object_type]_1 # Database Object type like (VIEWS, FUNCTIONS, TABLES ...)
│ │ │ │ ├── object_name_1.sql # The database object name(table name, view name, function name ...)
│ │ │ │ ├── object_name_2.sql # The database object name(table name, view name, function name ...)
│ │ │ ├── /[object_type]_2 # Database Object type like (VIEWS, FUNCTIONS, TABLES ...)
│ │ │ │ ├── object_name_3.sql # The database object name(table name, view name, function name ...)
│ │ │ │ ├── object_name_4.sql # The database object name(table name, view name, function name ...)
│ │ ├── /schema_name_2 # database Schema name
│ │ │ ├── /[object_type]_1 # Database Object type like (VIEWS, FUNCTIONS, TABLES ...)
│ │ │ │ ├── object_name_5.sql # The database object name(table name, view name, function name ...)
│ │ │ │ ├── object_name_6.sql # The database object name(table name, view name, function name ...)
│ │ │ ├── /[object_type]_2 # Database Object type like (VIEWS, FUNCTIONS, TABLES ...)
│ │ │ │ ├── object_name_7.sql # The database object name(table name, view name, function name ...)
│ │ │ │ ├── object_name_8.sql # The database object name(table name, view name, function name ...)
├── /tests # SQL unit test scripts
├── config.yml # configuration file
├── parameter-[profile-1].properties # parameter property file
├── parameter-[profile-2].properties # parameter property file
└── parameter-[profile-3].properties # parameter property file
```
Where
- **database_name_*:** is the database name of your project,
- **schema_name_*:** are the schemas inside the database,
- **object_type:** is the type of the object, one of (VIEWS, FUNCTIONS, PROCEDURES, FILE_FORMATS, TABLES, SEQUENCES, STAGES, STREAMS, TASKS),
- **object_name_*.sql:** are the individual database object scripts,
- **config.yml:** is a configuration file used to configure DLSync behavior,
- **parameter-[profile].properties:** are parameter-to-value map files, each used by one instance of your database.
These property files help you parametrize values that change between instances. For each deployment instance of your database (project) you should create a separate parameter profile property file,
named in the above format with `[profile]` replaced by the instance name of your database. You will provide the profile name in a command line option or environment variable when running this tool.
### Script content
Each object has a single SQL file that tracks the changes applied to that object. The SQL file is named after the object.
For example, if you have a view named `SAMPLE_VIEW` in schema `MY_SCHEMA` in database `MY_DATABASE`, then the script file should be named `SAMPLE_VIEW.SQL` and placed in the directory `[scripts_root]/main/MY_DATABASE/MY_SCHEMA/VIEWS/SAMPLE_VIEW.SQL`.
The structure and content of a script differs based on its type. This tool categorizes scripts into two types: state scripts and migration scripts.
#### 1. State Script
This type of script is used for the object types views, UDFs, stored procedures and file formats.
In this type of script you define the current (desired) state of the object.
When a change is made to the script, DLSync replaces the object with the updated definition.
These scripts must always use a `create or replace` statement; every time you change the script, DLSync will replace the object with the new definition.
The SQL file should be named after the database object.
A state script file should adhere to the following rules:
1. The file name should match the database object name referenced by the `create or replace` statement.
2. The file should contain only one SQL DDL statement, which creates or replaces the specified object.
3. The statement should refer to the object by its fully qualified name (database.schema.object_name).
E.g., a view named SAMPLE_VIEW can have the following SQL statement in the `SAMPLE_VIEW.SQL` file:
```
create or replace view ${MY_DB}.${MY_SCHEMA}.SAMPLE_VIEW as select * from ${MY_DB}.${MY_SECOND_SCHEMA}.MY_TABLE;
```
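State scripts for other object types follow the same pattern. For instance, a SQL UDF state script (this sketch mirrors the `CALCULATE_DISCOUNT` function shipped with the example scripts; the database and schema parameters are illustrative):
```
CREATE OR REPLACE FUNCTION ${MY_DB}.${MY_SCHEMA}.CALCULATE_DISCOUNT(P_PRICE NUMERIC(10, 2), P_DISCOUNT_RATE NUMERIC(5, 2))
RETURNS NUMERIC(10, 2)
LANGUAGE SQL
AS
$$
P_PRICE * (1 - P_DISCOUNT_RATE / 100)
$$;
```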
#### 2. Migration Script
This type of script is used for the object types TABLES, SEQUENCES, STAGES, STREAMS and TASKS.
Here the script is treated as a series of migrations that are applied to the object sequentially, ordered by version number.
A migration script contains one or more migration versions. Each migration version consists of a version number, an author (optional), content (a DDL or DML SQL statement), a rollback statement (optional) and a verify statement (optional).
Each migration version is immutable, i.e. once a version is deployed you cannot change its code; you can only add new versions.
E.g., for a table named `SAMPLE_TABLE` you can have the following SQL statements in the `SAMPLE_TABLE.SQL` file:
```
---version: 0, author: user1
create or replace table ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE(id varchar, my_column varchar);
---rollback: drop table if exists ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE;
---verify: select * from ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE limit 1;
---version: 1, author: user1
insert into ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE values('1', 'value');
---rollback: delete from ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE where id = '1';
---verify: select 1/count(*) from ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE where id = '1';
---version: 2, author: user2
alter table ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE add column my_new_column varchar;
---rollback: alter table ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE drop column my_new_column;
---verify: select my_new_column from ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE limit 1;
```
The migration script will have the following format:
```
---version: VERSION_NUMBER, author: NAME
CONTENT;
---rollback: ROLLBACK_CONTENT;
---verify: VERIFY_CONTENT;
```
where
- ```VERSION_NUMBER``` is the version number of the migration,
- ```NAME``` is the author of the script,
- ```CONTENT``` is the DDL or DML statement that changes the object,
- ```ROLLBACK_CONTENT``` is the statement that rolls back the changes made by this version,
- ```VERIFY_CONTENT``` is the statement that verifies the changes made by this version.
The migration script should adhere to the following rules:
1. Each change to a database object should be wrapped in the migration format specified above.
2. Each migration version should contain a migration header (version and author), the content of the migration (a single DDL or DML statement), a rollback (optional) and a verify (optional).
3. The migration header should start on a new line with three hyphens (---) and can contain only version and author.
4. Version should be a unique number within each script file, in incremental order. It is used to order the migration sequence for that object.
5. Author is optional, alphanumeric, and used for informational purposes only, to track who added the change.
6. The content of the migration should be specified after the migration header on a new line, and it can span multiple lines.
7. The content should always be terminated by a semicolon (`;`).
8. A rollback, if specified, should start on a new line with `---rollback: `. The rollback script should be on a single line and must be terminated with a semicolon (`;`).
9. A verify, if specified, should start on a new line with `---verify: `. The verify script should be on a single line and must be terminated with a semicolon (`;`).
10. Migration versions are immutable. Once a version is deployed, it cannot be changed; new versions can only be added, or existing versions rolled back.
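Two verify idioms from the `SAMPLE_TABLE` example above are worth calling out: `select 1/count(*)` fails with a division-by-zero error when the expected row is absent, and selecting a column fails when that column does not exist. Either error causes the version to be reported as out of sync:
```
-- Fails (division by zero) when no row with id = '1' exists:
select 1/count(*) from ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE where id = '1';
-- Fails when the column has not been added:
select my_new_column from ${MY_DB}.${MY_SCHEMA}.SAMPLE_TABLE limit 1;
```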
### Configurations
#### Parameter profile
Parameter files let you define parameters that change between different database instances. This is helpful if you have values that differ between instances (like dev, staging and prod).
Parameter files are defined per instance. They are property files where you define parameters and their values.
The parameter files should be placed in the root script directory and named in the following format:
```
parameter-[profile].properties
```
where `[profile]` is the instance name of your database. You will provide the profile name in a command line option or environment variable when running this tool.
E.g. if you have a dev instance of your database, create a parameter file named `parameter-dev.properties` in the root script directory. Its content could be:
```
MY_DB=my_database_dev
MY_SCHEMA=my_schema_dev
other_param=other_value
```
You can then use these parameters in your script files. The format for referencing a parameter is:
```
${parameter_name}
```
where `parameter_name` is the name of a parameter defined, along with its value, in the `parameter-[profile].properties` file.
For example,
```
create or replace view ${MY_DB}.${MY_SCHEMA}.my_view as select * from ${MY_DB}.${MY_SCHEMA}.my_table;
```
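With the `parameter-dev.properties` file shown above (`MY_DB=my_database_dev`, `MY_SCHEMA=my_schema_dev`), this statement resolves at deployment time to:
```
create or replace view my_database_dev.my_schema_dev.my_view as select * from my_database_dev.my_schema_dev.my_table;
```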
#### config file
The config file is used to configure the behavior of this tool. It should be named `config.yml` and placed in the root script directory, and it has the following format:
```
version: # version of the config file
configTables: # list of configuration tables, only used by the create script module
scriptExclusion: # list of script files to be excluded from the deploy, verify, rollback and create script modules
dependencyOverride: # list of additional dependencies for the scripts
  - script: # script file name whose dependencies to override
    dependencies: # list of dependencies to add
```
`configTables` is used by the create script module to include the data of the listed tables in the generated script files.
`scriptExclusion` excludes the listed script files from being processed by this tool.
`dependencyOverride` overrides the dependencies of the listed script files; it can be used to add additional dependencies to a script.
### How to use this tool
To run the application you need to provide the Snowflake connection parameters as environment variables. The following environment variables are required:
```
account=my_account #account used for connection
db=database #your database
schema=dl_sync #your dl_sync schema. DLSync uses this schema to store the tables necessary for this tool
user=user_name #user name of the database
password=password #password for the connection (optional)
authenticator=externalbrowser #authenticator used for the connection (optional)
warehouse=my_warehouse #warehouse to be used by the connection
role=my_role #role used by this tool
```
You also need to provide the script root directory and the profile to use. These can be provided as command line arguments or as environment variables;
command line arguments override environment variables.
You can provide the command line options as follows:
```
dlsync deploy --script-root path/to/db_scripts --profile dev
```
or
```
dlsync deploy -s path/to/db_scripts -p dev
```
or you can provide them as environment variables:
```
script_root=path/to/db_scripts
profile=dev
```
There are 4 main modules (commands). Each module of the tool can be triggered via a command line argument.
#### Deploy
This module deploys changes to the database objects based on the script files.
First, DLSync identifies the changed scripts by comparing the hash of each script file with the hash stored in the database (the `dl_sync_script_history` table). For migration scripts, each migration version has its own hash stored in the script history, so only newly added versions are picked up as changes. After identifying the changes, DLSync orders the scripts based on their dependencies, then deploys the changes to the database objects sequentially.
The deploy module can be triggered using the following command:
```
dlsync deploy -s path/to/db_scripts -p dev
```
If you have already deployed the changes manually or through other tools, you can mark the scripts as deployed without deploying the changes. This only adds the hashes to the script history table (`dl_sync_script_history`) without affecting the current database state, which can be very helpful when migrating from other tools.
You can use the following command to mark the scripts as deployed without deploying them:
```
dlsync deploy --only-hashes -s path/to/db_scripts -p dev
```
or
```
dlsync deploy -o -s path/to/db_scripts -p dev
```
#### Rollback
This module rolls back changes to the previous deployment state, based on the script files. It should be triggered after you have rolled back the git repository of the script files.
Rollback works by first identifying the changes between the current deployment and the previous deployment. State scripts (views, UDFs, stored procedures and file formats) are simply redeployed from the current script content (i.e. the previous version, since you have already rolled back in git).
For migration scripts it identifies the versions that need to be rolled back, by finding versions that were deployed previously but are missing from the current scripts, and then executes the rollback statement specified in each such migration version.
The rollback is recorded in the script history table.
To roll back the changes, use the following command:
```
dlsync rollback --script-root path/to/db_scripts --profile dev
```
#### Verify
This module verifies that the database scripts are in sync with the current database objects. For state scripts it compares the content of the script with the DDL of the database object.
For migration scripts it uses the verify statement provided in the migration version; if the verify statement throws an error, the migration version is marked as out of sync. Since later migration versions can change the results of previous versions, only the latest migration version of each script is checked.
To verify the changes, use the following command:
```
dlsync verify --script-root path/to/db_scripts --profile qa
```
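The README does not spell out the comparison mechanism, but for reference, Snowflake exposes an object's current DDL through the built-in `GET_DDL` function, which returns the kind of definition a state script is checked against (the object name here is illustrative):
```
select get_ddl('view', 'MY_DATABASE.MY_SCHEMA.SAMPLE_VIEW');
```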
#### Create script
This module creates a script file for each database object. It can be used to bootstrap script files for existing database objects, which is helpful when migrating from other tools to DLSync. To achieve this, it first identifies the schemas inside the current database, then retrieves the DDL of each object in each schema, replaces static values with parameter keys based on the provided parameter profile, and finally creates a script file for each object.
If you have configuration tables whose data should also be included in the script files, provide the list of table names in the config file.
```
dlsync create_script --script-root path/to/db_scripts --profile uat
```
## Tables used by this tool
DLSync stores script metadata, deployment history and logs in the database.
DLSync depends on these tables to track changes and deployment history. If these tables are missing from the schema and database provided in the connection parameters, DLSync will create them.
Please make sure the role provided in the connection has the necessary privileges to create tables in the schema.
**_N.B.: Since DLSync uses these tables to track changes, it is recommended not to delete or alter them. It is also important not to change the schema of the connection. If DLSync cannot find these tables in the schema, it will create them and assume it is running for the first time._**
This tool uses the following tables to store important information:
### dl_sync_script_history
This table stores the metadata for script files. It contains the following columns:
```
script_id: for state scripts the script name; for migration scripts the script name plus the version number
object_name: the object name of the script
object_type: the type of the object (VIEWS, FUNCTIONS, PROCEDURES, FILE_FORMATS, TABLES, SEQUENCES, STAGES, STREAMS, TASKS)
rollback_script: the rollback script for the migration version
script_hash: the hash of the script file
deployed_hash: the hash of the script file that has been deployed
change_sync_id: the id of the change sync
created_by: the db user who added this change
created_ts: the timestamp when this change was added
updated_by: the db user who updated this change
updated_ts: the timestamp when this change was updated
```
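For orientation, here is a rough DDL sketch of this table; the column types are assumptions for illustration only, since DLSync creates and manages the actual table itself:
```
-- Hypothetical sketch only; the real definition is created by DLSync.
create table if not exists dl_sync_script_history (
    script_id       varchar,
    object_name     varchar,
    object_type     varchar,
    rollback_script varchar,
    script_hash     varchar,
    deployed_hash   varchar,
    change_sync_id  number,
    created_by      varchar,
    created_ts      timestamp,
    updated_by      varchar,
    updated_ts      timestamp
);
```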
### dl_sync_change_sync
This table stores the deployment history of the scripts. It contains the following columns:
```
id: the id of the change sync
change_type: the type of the change (DEPLOY, ROLLBACK, VERIFY)
status: the status of the change (SUCCESS, FAILED)
log: the log of the change
change_count: the number of changes in this sync
start_time: the start time of the change
end_time: the end time of the change
```
### dl_sync_script_event
This table stores the logs of each script activity. It contains the following columns:
```
id: the id of the script event
script_id: the id of the script
object_name: the object name of the script
script_hash: the hash of the script
status: the status of the script (SUCCESS, FAILED)
log: the log of the script
change_sync_id: the id of the change sync
created_by: the db user who added this change
created_ts: the timestamp when was this change added
```
## Example scripts
To explore the tool, you can use the example scripts provided in the `example_scripts` directory.

8
backlog.md Normal file

@@ -0,0 +1,8 @@
# DLSync Backlog
- [x] Rollback for migration
- [x] Verify module State Script
- [x] create script to capture config tables
- [x] Script hierarchy design
- [x] Verify module for migration Script
- [ ] Migration script parsing using ANTLR
- [ ] Support for different DBs

51
build.gradle Normal file

@@ -0,0 +1,51 @@
/*
* This file was generated by the Gradle 'init' task.
*/
plugins {
id 'java'
}
repositories {
mavenLocal()
maven {
url = uri('https://repo.maven.apache.org/maven2/')
}
}
dependencies {
implementation 'org.apache.commons:commons-text:1.10.0'
implementation 'net.snowflake:snowflake-jdbc:3.20.0'
implementation 'ch.qos.logback:logback-core:1.5.12'
implementation 'ch.qos.logback:logback-classic:1.5.12'
implementation 'org.slf4j:slf4j-api:2.0.4'
implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.18.2'
implementation 'commons-cli:commons-cli:1.9.0'
compileOnly 'org.projectlombok:lombok:1.18.24'
annotationProcessor 'org.projectlombok:lombok:1.18.24'
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
}
group = 'com.snowflake'
version = '1.0-SNAPSHOT'
description = 'dlsync'
java.sourceCompatibility = JavaVersion.VERSION_11
jar {
dependsOn 'test'
manifest {
attributes(
'Main-Class': 'com.snowflake.dlsync.Main'
)
}
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
from {
configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
}
}
test {
useJUnitPlatform()
}


@@ -0,0 +1,8 @@
version: 1
configTables:
scriptExclusion:
  - ${EXAMPLE_DB}.${AUDIT_SCHEMA}.AUDIT_SEQ
dependencyOverride:
  - script: ${EXAMPLE_DB}.${MAIN_SCHEMA}.UPDATE_STOCK
    dependencies:
      - ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS


@@ -0,0 +1,6 @@
---version: 0, author: DlSync
CREATE SEQUENCE ${EXAMPLE_DB}.${AUDIT_SCHEMA}.AUDIT_SEQ
START WITH 1
INCREMENT BY 1;
---rollback: DROP SEQUENCE IF EXISTS ${EXAMPLE_DB}.${AUDIT_SCHEMA}.AUDIT_SEQ;
---verify: SHOW SEQUENCES LIKE 'AUDIT_SEQ' IN ${EXAMPLE_DB}.${AUDIT_SCHEMA};


@@ -0,0 +1,9 @@
---version: 0, author: DLSync
CREATE TABLE ${EXAMPLE_DB}.${AUDIT_SCHEMA}.PRODUCT_AUDIT (
AUDIT_ID INT AUTOINCREMENT PRIMARY KEY,
PRODUCT_ID INT,
ACTION STRING NOT NULL,
ACTION_TIMESTAMP TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
---rollback: DROP TABLE IF EXISTS ${EXAMPLE_DB}.${AUDIT_SCHEMA}.PRODUCT_AUDIT;
---verify: SELECT * FROM ${EXAMPLE_DB}.${AUDIT_SCHEMA}.PRODUCT_AUDIT LIMIT 1;


@@ -0,0 +1,9 @@
---version: 0, author: DLSync
CREATE TABLE ${EXAMPLE_DB}.${AUDIT_SCHEMA}.USER_AUDIT (
AUDIT_ID INT AUTOINCREMENT PRIMARY KEY,
USER_ID INT,
ACTION STRING NOT NULL,
ACTION_TIMESTAMP TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
---rollback: DROP TABLE IF EXISTS ${EXAMPLE_DB}.${AUDIT_SCHEMA}.USER_AUDIT;
---verify: SELECT * FROM ${EXAMPLE_DB}.${AUDIT_SCHEMA}.USER_AUDIT LIMIT 1;


@@ -0,0 +1,7 @@
CREATE OR REPLACE FILE FORMAT ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_CSV_FILE_FORMAT
TYPE = CSV
FIELD_DELIMITER = '|'
SKIP_HEADER = 1
NULL_IF = ('NULL', 'null')
EMPTY_FIELD_AS_NULL = true
COMPRESSION = gzip;


@@ -0,0 +1,7 @@
CREATE OR REPLACE FUNCTION ${EXAMPLE_DB}.${MAIN_SCHEMA}.CALCULATE_DISCOUNT(P_PRICE NUMERIC(10, 2), P_DISCOUNT_RATE NUMERIC(5, 2))
RETURNS NUMERIC(10, 2)
LANGUAGE SQL
AS
$$
P_PRICE * (1 - P_DISCOUNT_RATE / 100)
$$;


@@ -0,0 +1,7 @@
CREATE OR REPLACE FUNCTION ${EXAMPLE_DB}.${MAIN_SCHEMA}.CALCULATE_ORDER_TOTAL(P_QUANTITY INT, P_PRICE NUMERIC(10, 2))
RETURNS NUMERIC(10, 2)
LANGUAGE SQL
AS
$$
P_QUANTITY * P_PRICE
$$;


@@ -0,0 +1,20 @@
CREATE OR REPLACE PROCEDURE ${EXAMPLE_DB}.${MAIN_SCHEMA}.UPDATE_ORDER_SUMMARY()
returns string not null
language python
runtime_version = '3.8'
packages = ('snowflake-snowpark-python')
handler = 'main'
execute as caller
as
$$
def main(snowpark_session):
    ## Read the command into a Snowflake dataframe
    results_df = snowpark_session.sql("select * from ${EXAMPLE_DB}.${MAIN_SCHEMA}.USER_ORDER_SUMMARY")
    ## Write the results of the dataframe into a target table
    results_df.write.mode("overwrite").save_as_table("${EXAMPLE_DB}.${MAIN_SCHEMA}.MATERIALIZED_ORDER_SUMMARY")
    return f"Succeeded: Results inserted into table ${EXAMPLE_DB}.${MAIN_SCHEMA}.MATERIALIZED_ORDER_SUMMARY"
$$
;


@@ -0,0 +1,13 @@
CREATE OR REPLACE PROCEDURE ${EXAMPLE_DB}.${MAIN_SCHEMA}.UPDATE_STOCK(P_PRODUCT_ID INT, P_QUANTITY INT)
RETURNS STRING
LANGUAGE SQL
AS
$$
BEGIN
UPDATE ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS
SET STOCK = STOCK - P_QUANTITY
WHERE PRODUCT_ID = P_PRODUCT_ID;
RETURN 'STOCK UPDATED SUCCESSFULLY';
END;
$$;


@@ -0,0 +1,4 @@
---version: 0, author: DlSync
create or replace sequence ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_SEQ start with 1 increment by 1;
---rollback: DROP SEQUENCE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_SEQ;
---verify: SHOW SEQUENCES LIKE 'ORDER_SEQ' IN ${EXAMPLE_DB}.${MAIN_SCHEMA};


@@ -0,0 +1,13 @@
---version: 0, author: DlSync
CREATE OR REPLACE STAGE ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_DATA_STAGE
FILE_FORMAT = (TYPE = 'CSV')
COMMENT = 'Stage for uploading product data files';
---rollback: DROP STAGE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_DATA_STAGE;
---verify: SHOW STAGES LIKE 'PRODUCT_DATA_STAGE' IN ${EXAMPLE_DB}.${MAIN_SCHEMA};
---version: 1, author: DlSync
CREATE OR REPLACE STAGE ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_DATA_STAGE
FILE_FORMAT = ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_CSV_FILE_FORMAT
COMMENT = 'Stage for uploading product data files';
---rollback: DROP STAGE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_DATA_STAGE;
---verify: SHOW STAGES LIKE 'PRODUCT_DATA_STAGE' IN ${EXAMPLE_DB}.${MAIN_SCHEMA};


@@ -0,0 +1,5 @@
---version: 0, author: DlSync
CREATE OR REPLACE STREAM ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_STREAM
ON TABLE ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS;
---rollback: DROP STREAM IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_STREAM;
---verify: SHOW STREAMS LIKE 'ORDER_STREAM' IN ${EXAMPLE_DB}.${MAIN_SCHEMA};


@@ -0,0 +1,10 @@
---version: 0, author: DlSync
CREATE TABLE ${EXAMPLE_DB}.${MAIN_SCHEMA}.DISCOUNTS (
ID INT AUTOINCREMENT PRIMARY KEY,
PRODUCT_ID INT REFERENCES ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS(ID),
DISCOUNT_RATE DECIMAL,
VALID_FROM TIMESTAMP,
VALID_UNTIL TIMESTAMP
);
---rollback: DROP TABLE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.DISCOUNTS;
---verify: SELECT * FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.DISCOUNTS LIMIT 1;


@@ -0,0 +1,14 @@
---version: 0, author: DlSync
CREATE TABLE ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS (
ID INT AUTOINCREMENT PRIMARY KEY,
USER_ID INT REFERENCES ${EXAMPLE_DB}.${MAIN_SCHEMA}.USERS(ID),
PRODUCT_ID INT REFERENCES ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS(ID),
QUANTITY INT NOT NULL,
ORDER_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
---rollback: DROP TABLE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS;
---verify: SELECT * FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS LIMIT 1;
---version: 1, author: DlSync
ALTER TABLE ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS ADD COLUMN PAYMENT VARCHAR;
---verify: SELECT PAYMENT FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS;


@@ -0,0 +1,15 @@
---version: 0, author: DlSync
CREATE TABLE ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS (
ID INT AUTOINCREMENT PRIMARY KEY,
PRODUCT_NAME STRING NOT NULL,
PRICE NUMERIC(10, 2) NOT NULL,
STOCK INT NOT NULL,
CREATED_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
---rollback: DROP TABLE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS;
---verify: SELECT * FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS LIMIT 1;
---version: 1, author: DlSync
INSERT INTO ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS values('1', 'MY_VALUE', 25, 10, CURRENT_TIMESTAMP);
---rollback: DELETE FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS WHERE ID = '1';
---verify: SELECT 1/count(*) FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS WHERE ID = '1';


@@ -0,0 +1,9 @@
---version: 0, author: DLSync
CREATE TABLE ${EXAMPLE_DB}.${MAIN_SCHEMA}.USERS (
ID INT AUTOINCREMENT PRIMARY KEY,
USER_NAME STRING NOT NULL,
EMAIL STRING UNIQUE NOT NULL,
CREATED_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
---rollback: DROP TABLE IF EXISTS ${EXAMPLE_DB}.${MAIN_SCHEMA}.USERS;
---verify: SELECT * FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.USERS LIMIT 1;


@@ -0,0 +1,11 @@
CREATE OR REPLACE VIEW ${EXAMPLE_DB}.${MAIN_SCHEMA}.DATA_SUMMARY AS
SELECT ORD_SMY.ORDER_ID, STK_SMY.PRODUCT_ID, USR_ORD_SMY.USER_ID
FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_SUMMARY ORD_SMY
LEFT OUTER JOIN ${EXAMPLE_DB}.${MAIN_SCHEMA}.STOCK_SUMMARY STK_SMY
ON ORD_SMY.PRODUCT_ID = STK_SMY.PRODUCT_ID
LEFT OUTER JOIN ${EXAMPLE_DB}.${MAIN_SCHEMA}.USER_ORDER_SUMMARY USR_ORD_SMY
ON ORD_SMY.USER_ID = USR_ORD_SMY.USER_ID
LEFT OUTER JOIN ${EXAMPLE_DB}.${AUDIT_SCHEMA}.PRODUCT_AUDIT PRD_ADT
ON PRD_ADT.PRODUCT_ID = ORD_SMY.PRODUCT_ID
LEFT OUTER JOIN ${EXAMPLE_DB}.${AUDIT_SCHEMA}.USER_AUDIT USR_ADT
ON USR_ADT.USER_ID = ORD_SMY.USER_ID;


@@ -0,0 +1,13 @@
CREATE OR REPLACE VIEW ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDER_SUMMARY AS
SELECT
ORD.ID AS ORDER_ID,
ORD.USER_ID,
ORD.PRODUCT_ID,
ORD.QUANTITY,
ORD.ORDER_DATE,
${EXAMPLE_DB}.${MAIN_SCHEMA}.CALCULATE_ORDER_TOTAL(ORD.QUANTITY, PRD.PRICE) AS TOTAL_PRICE,
${EXAMPLE_DB}.${MAIN_SCHEMA}.CALCULATE_DISCOUNT(TOTAL_PRICE, DIS.DISCOUNT_RATE) AS DISCOUNTED_TOTAL_PRICE
FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS ORD
LEFT OUTER JOIN ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS PRD
ON ORD.PRODUCT_ID = PRD.ID
LEFT OUTER JOIN ${EXAMPLE_DB}.${MAIN_SCHEMA}.DISCOUNTS DIS
ON ORD.PRODUCT_ID = DIS.PRODUCT_ID;


@@ -0,0 +1,3 @@
CREATE OR REPLACE VIEW ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_STAGE_VIEW
AS
SELECT t.$1, t.$2 FROM @${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_DATA_STAGE(file_format => '${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCT_CSV_FILE_FORMAT') t;


@@ -0,0 +1,10 @@
CREATE OR REPLACE VIEW ${EXAMPLE_DB}.${MAIN_SCHEMA}.STOCK_SUMMARY AS
SELECT
ID AS PRODUCT_ID,
PRODUCT_NAME,
STOCK,
CASE
WHEN STOCK < 10 THEN 'LOW STOCK'
ELSE 'SUFFICIENT STOCK'
END AS STOCK_STATUS
FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS;


@@ -0,0 +1,11 @@
CREATE OR REPLACE VIEW ${EXAMPLE_DB}.${MAIN_SCHEMA}.USER_ORDER_SUMMARY AS
SELECT
U.ID AS USER_ID,
U.USER_NAME,
COUNT(O.ID) AS TOTAL_ORDERS,
SUM(P.PRICE * O.QUANTITY) AS TOTAL_SPENT
FROM ${EXAMPLE_DB}.${MAIN_SCHEMA}.USERS U
LEFT JOIN ${EXAMPLE_DB}.${MAIN_SCHEMA}.ORDERS O ON U.ID = O.USER_ID
LEFT JOIN ${EXAMPLE_DB}.${MAIN_SCHEMA}.PRODUCTS P ON O.PRODUCT_ID = P.ID
GROUP BY
U.ID, U.USER_NAME;


@@ -0,0 +1,4 @@
#Script Parameters
EXAMPLE_DB=EXAMPLE_DEV
MAIN_SCHEMA=MAIN_SCHEMA
AUDIT_SCHEMA=DEV_AUDIT_SCHEMA


@@ -0,0 +1,4 @@
#Script Parameters
EXAMPLE_DB=EXAMPLE_QA
MAIN_SCHEMA=MAIN_SCHEMA
AUDIT_SCHEMA=QA_AUDIT_SCHEMA


@@ -0,0 +1,4 @@
#Script Parameters
EXAMPLE_DB=EXAMPLE_UAT
MAIN_SCHEMA=MAIN_SCHEMA
AUDIT_SCHEMA=UAT_AUDIT_SCHEMA

1
gradle.properties Normal file

@@ -0,0 +1 @@
releaseVersion=1.5.0

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.


@@ -0,0 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.2.1-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

248
gradlew vendored Executable file

@@ -0,0 +1,248 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

92
gradlew.bat vendored Normal file

@@ -0,0 +1,92 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

110
pom.xml Normal file

@@ -0,0 +1,110 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.snowflake</groupId>
<artifactId>dlsync</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>11</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.10.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/net.snowflake/snowflake-jdbc -->
<dependency>
<groupId>net.snowflake</groupId>
<artifactId>snowflake-jdbc</artifactId>
<version>3.20.0</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>1.5.12</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.5.12</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-yaml</artifactId>
<version>2.18.2</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.24</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.8.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.2</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<archive>
<manifest>
<mainClass>
com.snowflake.dlsync.Main
</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
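With the assembly plugin bound to the package phase as above, mvn package should produce an executable jar-with-dependencies artifact (by the plugin's default naming, target/dlsync-1.0-SNAPSHOT-jar-with-dependencies.jar) whose entry point is com.snowflake.dlsync.Main.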

5
settings.gradle Normal file
View File

@@ -0,0 +1,5 @@
/*
* This file was generated by the Gradle 'init' task.
*/
rootProject.name = 'dlsync'

View File

@@ -0,0 +1,223 @@
package com.snowflake.dlsync;
import com.snowflake.dlsync.dependency.DependencyGraph;
import com.snowflake.dlsync.doa.ScriptRepo;
import com.snowflake.dlsync.doa.ScriptSource;
import com.snowflake.dlsync.models.*;
import com.snowflake.dlsync.parser.ParameterInjector;
import lombok.extern.slf4j.Slf4j;
import java.io.*;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class ChangeManager {
private Config config;
private ScriptSource scriptSource;
private ScriptRepo scriptRepo;
private DependencyGraph dependencyGraph;
private ParameterInjector parameterInjector;
public ChangeManager(Config config, ScriptSource scriptSource, ScriptRepo scriptRepo, DependencyGraph dependencyGraph, ParameterInjector parameterInjector) {
this.config = config;
this.scriptSource = scriptSource;
this.scriptRepo = scriptRepo;
this.dependencyGraph = dependencyGraph;
this.parameterInjector = parameterInjector;
}
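// Migration scripts are append-only: once a given version has been deployed, its content must not change.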
private void validateScript(Script script) {
if(script instanceof MigrationScript && scriptRepo.isScriptVersionDeployed(script)) {
log.error("Migration type script changed. Script for the object {} has changed from previous deployments.", script.getId());
throw new RuntimeException("Migration type scripts should not change.");
}
}
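// Deploys every changed, non-excluded script in dependency order; when onlyHashes is true, hashes are recorded without executing any DDL.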
public void deploy(boolean onlyHashes) throws SQLException, IOException, NoSuchAlgorithmException{
log.info("Started Deploying {}", onlyHashes?"Only Hashes":"scripts");
startSync(ChangeType.DEPLOY);
scriptRepo.loadScriptHash();
List<Script> changedScripts = scriptSource.getAllScripts()
.stream()
.filter(script -> !config.isScriptExcluded(script))
.filter(script -> scriptRepo.isScriptChanged(script))
.collect(Collectors.toList());
dependencyGraph.addNodes(changedScripts);
List<Script> sequencedScript = dependencyGraph.topologicalSort();
log.info("Deploying {} change scripts to db.", sequencedScript.size());
int size = sequencedScript.size();
int index = 1;
for(Script script: sequencedScript) {
log.info("{} of {}: Deploying object: {}", index++, size, script);
parameterInjector.injectParameters(script);
validateScript(script);
scriptRepo.createScriptObject(script, onlyHashes);
}
endSyncSuccess(ChangeType.DEPLOY, (long)sequencedScript.size());
}
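// Rolls back in reverse dependency order: migrations that were deployed but are no longer in source run their stored rollback scripts, while changed state scripts are re-applied from source.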
public void rollback() throws SQLException, IOException {
log.info("Starting ROLLBACK scripts.");
startSync(ChangeType.ROLLBACK);
Set<String> deployedScriptIds = new HashSet<>(scriptRepo.loadScriptHash());
scriptSource.getAllScripts().forEach(script -> deployedScriptIds.remove(script.getId()));
List<MigrationScript> migrations = scriptRepo.getMigrationScripts(deployedScriptIds);
dependencyGraph.addNodes(migrations);
List<Script> changedScripts = scriptSource.getAllScripts()
.stream()
.filter(script -> !config.isScriptExcluded(script))
.filter(script -> !script.getObjectType().isMigration())
.filter(script -> scriptRepo.isScriptChanged(script))
.collect(Collectors.toList());
dependencyGraph.addNodes(changedScripts);
List<Script> sequencedScript = dependencyGraph.topologicalSort();
int size = sequencedScript.size();
int index = 1;
for(int i = sequencedScript.size() - 1; i >= 0; i--) {
Script script = sequencedScript.get(i);
if(script instanceof MigrationScript) {
MigrationScript migration = (MigrationScript)script;
log.info("{} of {}: Rolling-back object: {}", index++, size, migration);
parameterInjector.injectParametersAll(migration);
scriptRepo.executeRollback(migration);
}
else {
log.info("{} of {}: Rolling-back object: {}", index++, size, script);
parameterInjector.injectParameters(script);
scriptRepo.createScriptObject(script, false);
}
}
endSyncSuccess(ChangeType.ROLLBACK, 0L);
}
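// Verifies that every state object in the database matches its source script, and that the latest migration per object passes its verify statement.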
public boolean verify() throws IOException, NoSuchAlgorithmException, SQLException{
log.info("Started verify scripts.");
startSync(ChangeType.VERIFY);
scriptRepo.loadDeployedHash();
int failedCount = 0;
Set<Script> sourceScripts = scriptSource.getAllScripts().stream()
.filter(script -> !config.isScriptExcluded(script))
.collect(Collectors.toSet());
List<String> schemaNames = scriptRepo.getAllSchemasInDatabase(scriptRepo.getDatabaseName());
for(String schema: schemaNames) {
List<Script> stateScripts = scriptRepo.getStateScriptsInSchema(schema)
.stream()
.filter(script -> !config.isScriptExcluded(script))
.collect(Collectors.toList());
for(Script script: stateScripts) {
parameterInjector.parametrizeScript(script, true);
Script sourceScript = sourceScripts.stream().filter(s -> s.equals(script)).findFirst().orElse(null);
if(sourceScript == null) {
log.error("Script [{}] is not found in source.", script);
failedCount++;
continue;
}
if (!scriptRepo.compareScript(script, sourceScript)) {
failedCount++;
log.error("Script verification failed for {}. The source script is different from db object [{}] ", script, script.getContent());
} else {
log.info("Verified Script {} is correct.", script);
}
}
}
Map<String, List<MigrationScript>> groupedMigrationScripts = sourceScripts.stream()
.filter(script -> script instanceof MigrationScript)
.map(script -> (MigrationScript)script)
.collect(Collectors.groupingBy(Script::getObjectName));
for(String objectName: groupedMigrationScripts.keySet()) {
List<MigrationScript> sameObjectMigrations = groupedMigrationScripts.get(objectName);
Optional<MigrationScript> lastMigration = sameObjectMigrations.stream().sorted(Comparator.comparing(MigrationScript::getVersion).reversed()).findFirst();
if (lastMigration.isPresent()) {
MigrationScript migrationScript = lastMigration.get();
parameterInjector.injectParametersAll(migrationScript);
if (!scriptRepo.executeVerify(migrationScript)) {
failedCount++;
log.error("Script verification failed for {}. The verify script [{}] failed to execute.", migrationScript, migrationScript.getVerify());
} else {
log.info("Verified Script {} is correct.", migrationScript);
}
}
}
if(failedCount != 0) {
log.error("Verification failed!");
endSyncError(ChangeType.VERIFY, failedCount + " scripts failed to verify.");
throw new RuntimeException("Verification failed!");
}
log.info("All scripts have been verified successfully.");
endSyncSuccess(ChangeType.VERIFY, (long)sourceScripts.size());
return true;
}
public void createAllScriptsFromDB() throws SQLException, IOException {
log.info("Started create scripts.");
startSync(ChangeType.CREATE_SCRIPT);
HashSet<String> configTableWithParameter = new HashSet<>();
if(config != null && config.getConfigTables() != null) {
configTableWithParameter.addAll(config.getConfigTables());
}
Set<String> configTables = parameterInjector.injectParameters(configTableWithParameter);
List<String> schemaNames = scriptRepo.getAllSchemasInDatabase(scriptRepo.getDatabaseName());
int count = 0;
for(String schema: schemaNames) {
List<Script> scripts = scriptRepo.getAllScriptsInSchema(schema);
for(Script script: scripts) {
count++;
if(configTables.contains(script.getFullObjectName())) {
scriptRepo.addConfig(script);
}
parameterInjector.parametrizeScript(script, false);
}
scriptSource.createScriptFiles(scripts);
}
endSyncSuccess(ChangeType.CREATE_SCRIPT, (long)count);
}
public void createLineage() throws IOException, SQLException {
log.info("Started Lineage graph.");
startSync(ChangeType.CREATE_LINEAGE);
List<Script> scripts = scriptSource.getAllScripts();
dependencyGraph.addNodes(scripts);
List<ScriptDependency> manualDependencies = config.getDependencyOverride()
.stream()
.flatMap(dependencyOverride -> {
Script script = scripts.stream().filter(s -> s.getFullObjectName().equals(dependencyOverride.getScript())).findFirst().get();
List<Script> dependencies = dependencyOverride.getDependencies()
.stream()
.map(dependencyName -> scripts.stream().filter(s -> s.getFullObjectName().equals(dependencyName)).findFirst().get())
.collect(Collectors.toList());
return dependencies.stream().map(dependency -> new ScriptDependency(script, dependency));
})
.collect(Collectors.toList());
List<ScriptDependency> dependencyList = dependencyGraph.getDependencyList();
dependencyList.addAll(manualDependencies);
scriptRepo.insertDependencyList(dependencyList);
endSyncSuccess(ChangeType.CREATE_LINEAGE, (long)dependencyList.size());
}
public void startSync(ChangeType changeType) throws SQLException {
scriptRepo.insertChangeSync(changeType, Status.IN_PROGRESS, changeType.toString() + " started.");
}
public void endSyncError(ChangeType changeType, String message) throws SQLException {
scriptRepo.updateChangeSync(changeType, Status.ERROR, message, null);
}
public void endSyncSuccess(ChangeType changeType, Long changeCount) throws SQLException {
scriptRepo.updateChangeSync(changeType, Status.SUCCESS, "Successfully completed " + changeType.toString() , changeCount);
}
}

View File

@@ -0,0 +1,31 @@
package com.snowflake.dlsync;
import com.snowflake.dlsync.dependency.DependencyExtractor;
import com.snowflake.dlsync.dependency.DependencyGraph;
import com.snowflake.dlsync.doa.ScriptRepo;
import com.snowflake.dlsync.doa.ScriptSource;
import com.snowflake.dlsync.parser.ParameterInjector;
import java.io.IOException;
public class ChangeMangerFactory {
public static ChangeManager createChangeManger() throws IOException {
ConfigManager configManager = new ConfigManager();
return createChangeManger(configManager);
}
public static ChangeManager createChangeManger(String scriptRoot, String profile) throws IOException {
ConfigManager configManager = new ConfigManager(scriptRoot, profile);
return createChangeManger(configManager);
}
public static ChangeManager createChangeManger(ConfigManager configManager) throws IOException {
configManager.init();
ScriptSource scriptSource = new ScriptSource(configManager.getScriptRoot());
ScriptRepo scriptRepo = new ScriptRepo(configManager.getJdbcProperties());
ParameterInjector parameterInjector = new ParameterInjector(configManager.getScriptParameters());
DependencyExtractor dependencyExtractor = new DependencyExtractor();
DependencyGraph dependencyGraph = new DependencyGraph(dependencyExtractor, configManager.getConfig());
return new ChangeManager(configManager.getConfig(), scriptSource, scriptRepo, dependencyGraph, parameterInjector);
}
}
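A minimal sketch of programmatic use, assuming a script tree under ./scripts and a dev parameter profile (both placeholders; exception handling omitted):
ChangeManager changeManager = ChangeMangerFactory.createChangeManger("scripts", "dev");
changeManager.deploy(false); // pass true to record hashes without executing DDL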

View File

@@ -0,0 +1,128 @@
package com.snowflake.dlsync;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.snowflake.dlsync.models.Config;
import com.snowflake.dlsync.models.Script;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
@Slf4j
public class ConfigManager {
private final static String CONFIG_FILE_NAME = "config.yaml";
private final static String[] JDBC_KEY = {"user", "password", "account", "warehouse", "db", "schema", "role", "authenticator"};
private final static String SCRIPT_ROOT_KEY = "SCRIPT_ROOT";
private String scriptRoot;
private String profile;
private Properties scriptParameters;
private Properties jdbcProperties;
private Config config;
private AtomicBoolean isInitialized = new AtomicBoolean(false);
public ConfigManager() {
this(System.getenv(SCRIPT_ROOT_KEY), StringUtils.isEmpty(System.getenv("profile")) ? "dev" : System.getenv("profile").toLowerCase());
}
public ConfigManager(String scriptRoot, String profile) {
this.scriptRoot = scriptRoot == null ? System.getenv(SCRIPT_ROOT_KEY) : scriptRoot;
String fallbackProfile = StringUtils.isEmpty(System.getenv("profile")) ? "dev" : System.getenv("profile").toLowerCase();
this.profile = profile == null ? fallbackProfile : profile;
log.info("Using [{}] as script root path.", this.scriptRoot);
log.info("Using [{}] as profile.", this.profile);
}
public ConfigManager(String scriptRoot, String profile, Properties jdbcProperties, Properties scriptParameters, Config config) {
this.scriptRoot = scriptRoot;
this.profile = profile;
this.jdbcProperties = jdbcProperties;
this.scriptParameters = scriptParameters;
this.config = config;
}
public void init() throws IOException {
if(isInitialized.compareAndSet(false, true)) {
readEnvVariables();
readConfig();
readParameters();
}
}
public void readEnvVariables() {
jdbcProperties = new Properties();
for(String key: JDBC_KEY) {
String jdbcConfigValue = System.getenv(key);
if(jdbcConfigValue == null) {
if(!key.equals("authenticator")) {
log.warn("JDBC connection property {} not found", key);
}
}
else {
jdbcProperties.put(key, jdbcConfigValue);
}
}
}
public void readConfig() {
File configFile = Path.of(scriptRoot, CONFIG_FILE_NAME).toFile();
try {
if(configFile.exists()) {
ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
mapper.findAndRegisterModules();
config = mapper.readValue(configFile, Config.class);
}
else {
config = new Config();
}
} catch (IOException e) {
log.error("Failed to parse config file [{}]", configFile.getAbsolutePath());
throw new RuntimeException("Can not parse Config file. Please use yaml file with allowed properties", e);
}
}
public void readParameters() throws IOException {
scriptParameters = new Properties();
String propertiesFilePath = "parameter-" + profile + ".properties";
log.debug("Loading property file from [{}]", propertiesFilePath);
try (InputStream input = new FileInputStream(Path.of(scriptRoot, propertiesFilePath).toFile())) {
scriptParameters.load(input);
}
Map<String, String> environmentVariables = System.getenv();
for(String key: environmentVariables.keySet()) {
if(scriptParameters.containsKey(key)) {
scriptParameters.put(key, environmentVariables.get(key));
}
}
}
public Config getConfig() {
return config;
}
public Properties getJdbcProperties() {
return jdbcProperties;
}
public Properties getScriptParameters() {
return scriptParameters;
}
public String getProfile() {
return profile;
}
public String getScriptRoot() {
return scriptRoot;
}
}
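For illustration, the same configuration can be resolved explicitly instead of from environment variables (directory and profile names are placeholders; init throws IOException):
ConfigManager configManager = new ConfigManager("scripts", "dev");
configManager.init(); // reads config.yaml, parameter-dev.properties, and the JDBC env vars
Properties jdbcProps = configManager.getJdbcProperties();
Properties scriptParams = configManager.getScriptParameters();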

View File

@@ -0,0 +1,110 @@
package com.snowflake.dlsync;
import com.snowflake.dlsync.models.ChangeType;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.cli.*;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
@Slf4j
public class Main {
public static void main(String[] args) throws SQLException {
log.info("DlSync change Manager started.");
ChangeManager changeManager = null;
ChangeType changeType = null;
boolean onlyHashes = false;
List<String> schemas = null;
try {
changeType = getChangeType(args);
CommandLine commandLine = buildCommandOptions(args);
onlyHashes = commandLine.hasOption("only-hashes");
String scriptRoot = commandLine.getOptionValue("script-root");
String profile = commandLine.getOptionValue("profile");
changeManager = ChangeMangerFactory.createChangeManger(scriptRoot, profile);
switch (changeType) {
case DEPLOY:
changeManager.deploy(onlyHashes);
log.info("DLsync Changes deployed successfully.");
break;
case ROLLBACK:
changeManager.rollback();
log.info("DLsync Changes rollback successfully.");
break;
case VERIFY:
if(changeManager.verify()) {
log.info("DLsync Changes verified successfully.");
}
else {
log.error("DLsync Changes verification failed.");
}
break;
case CREATE_SCRIPT:
changeManager.createAllScriptsFromDB();
log.info("DLsync created all scripts from DB.");
break;
case CREATE_LINEAGE:
changeManager.createLineage();
log.info("DLsync successfully created lineage to DB.");
break;
default:
log.error("Change type not specified as an argument.");
}
} catch (IOException e) {
// changeManager may still be null if initialization failed, so guard before recording the failure
log.error("Error: ", e);
if(changeManager != null) {
changeManager.endSyncError(changeType, e.getMessage());
}
System.exit(2);
} catch (SQLException e) {
log.error("Error: ", e);
if(changeManager != null) {
changeManager.endSyncError(changeType, e.getMessage());
}
System.exit(3);
} catch (NoSuchAlgorithmException e) {
log.error("Error: ", e);
if(changeManager != null) {
changeManager.endSyncError(changeType, e.getMessage());
}
System.exit(4);
} catch (ParseException e) {
log.error("Error: ", e);
if(changeManager != null) {
changeManager.endSyncError(changeType, e.getMessage());
}
System.exit(5);
}
catch (Exception e) {
log.error("Error: ", e);
if(changeManager != null) {
changeManager.endSyncError(changeType, e.getMessage());
}
System.exit(1111);
}
}
public static CommandLine buildCommandOptions(String[] args) throws ParseException {
Options options = new Options();
try {
String[] argsWithoutCommand = Arrays.copyOfRange(args, 1, args.length);
Option onlyHashes = new Option("o", "only-hashes", false, "Deploy only hashes to database");
options.addOption(onlyHashes);
Option scriptRoot = new Option("s", "script-root", true, "Script root directory");
options.addOption(scriptRoot);
Option profile = new Option("p", "profile", true, "Profile to use");
options.addOption(profile);
CommandLine commandLine = new DefaultParser().parse(options, argsWithoutCommand);
return commandLine;
} catch (ParseException e) {
new HelpFormatter().printHelp("dlsync [deploy|rollback|verify|create-script|create-lineage] [options]", "options:", options, "");
throw e;
}
}
public static ChangeType getChangeType(String[] args) {
return args.length >= 1 ? ChangeType.valueOf(args[0].toUpperCase()) : ChangeType.VERIFY;
}
}
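Putting the entry point together, a hypothetical invocation of a packaged build would be java -jar dlsync.jar deploy --script-root /path/to/scripts --profile dev, with --only-hashes added to record hashes without executing DDL; the jar name depends on the build tool used.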

View File

@@ -0,0 +1,48 @@
package com.snowflake.dlsync;
import com.snowflake.dlsync.models.*;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ScriptFactory {
public static StateScript getStateScript(String databaseName, String schemaName, ScriptObjectType objectType, String objectName, String content) {
return new StateScript(databaseName, schemaName, objectName, objectType, content);
}
public static MigrationScript getMigrationScript(String databaseName, String schemaName, ScriptObjectType objectType, String objectName, String content, Long version, String author, String rollback, String verify) {
return new MigrationScript(databaseName, schemaName, objectName, objectType, content, version, author, rollback, verify);
}
public static MigrationScript getMigrationScript(String fullObjectName, ScriptObjectType objectType, String content, Long version, String author, String rollback, String verify) {
String databaseName = null, schemaName = null, objectName = null;
String[] nameSplit = fullObjectName.split("\\.");
if(nameSplit.length > 2) {
databaseName = nameSplit[0];
schemaName = nameSplit[1];
objectName = nameSplit[2];
}
else {
log.error("Error while splitting fullObjectName {}: Missing some values", fullObjectName);
throw new RuntimeException("Error while splitting fullObjectName");
}
return new MigrationScript(databaseName, schemaName, objectName, objectType, content, version, author, rollback, verify);
}
public static MigrationScript getMigrationScript(String database, String schema, ScriptObjectType type, String objectName, String content) {
Long version = 0L;
String author = "DlSync";
String rollback = String.format("DROP %s IF EXISTS %s;", type.getSingular(), database + "." + schema + "." + objectName);
String verify = String.format("SHOW %s LIKE '%s';",type, database + "." + schema + "." + objectName);
String migrationHeader = String.format("---version: %s, author: %s\n", version, author);
String rollbackFormat = String.format("\n---rollback: %s", rollback);
String verifyFormat = String.format("\n---verify: %s", verify);
content = migrationHeader + content + rollbackFormat + verifyFormat;
MigrationScript script = getMigrationScript(database, schema, type, objectName, content, version, author, rollback, verify);
return script;
}
public static MigrationScript getMigrationScript(String databaseName, String schemaName, ScriptObjectType objectType, String objectName, Migration migration) {
return new MigrationScript(databaseName, schemaName, objectName, objectType, migration.getContent(), migration.getVersion(), migration.getAuthor(), migration.getRollback(), migration.getVerify());
}
}
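A hedged illustration of the default migration wrapper above (object name and DDL are hypothetical; the singular form of ScriptObjectType.TABLES is assumed to render as TABLE):
MigrationScript script = ScriptFactory.getMigrationScript(
        "ANALYTICS_DB", "REPORTING", ScriptObjectType.TABLES, "ORDERS",
        "CREATE TABLE ANALYTICS_DB.REPORTING.ORDERS (ID NUMBER);");
// script.getContent() would then read:
// ---version: 0, author: DlSync
// CREATE TABLE ANALYTICS_DB.REPORTING.ORDERS (ID NUMBER);
// ---rollback: DROP TABLE IF EXISTS ANALYTICS_DB.REPORTING.ORDERS;
// ---verify: SHOW TABLES LIKE 'ANALYTICS_DB.REPORTING.ORDERS';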

View File

@@ -0,0 +1,19 @@
package com.snowflake.dlsync;
import lombok.extern.slf4j.Slf4j;
import net.snowflake.client.jdbc.internal.org.bouncycastle.util.encoders.Hex;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@Slf4j
public class Util {
public static String getMd5Hash(String content) {
try {
return Hex.toHexString(MessageDigest.getInstance("MD5").digest(content.getBytes()));
} catch (NoSuchAlgorithmException e) {
log.error("Hashing error: {}", e.getMessage());
throw new RuntimeException(e);
}
}
}
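For example, the deploy flow hashes each script's content and compares it with the stored hash:
String hash = Util.getMd5Hash("SELECT 1;"); // 32-character lowercase hex digest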

View File

@@ -0,0 +1,141 @@
package com.snowflake.dlsync.dependency;
import com.snowflake.dlsync.models.MigrationScript;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.ScriptObjectType;
import com.snowflake.dlsync.parser.SqlTokenizer;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@Slf4j
public class DependencyExtractor {
private static final String[] VIEW_DEPENDENCY_REGEX = {"\\s(?i)FROM\\s+([^\\(\\s]+?)(\\s+(as\\s+)?\\w\\s*,|[\\s;\\)]|$)", "\\s(?i)JOIN\\s+([^\\(\\s]+?)(?:[\\s;\\),]|$)","TOKEN\\s*:\\s*(?:/\\*.*\\*/*)\\n\\{(?:[^\\}\\{]+|\\{(?:[^\\}\\{]+|\\{[^\\}\\{]*\\})*\\})*\\}"};
private static final String CROSS_JOIN_DEPENDENCY = "";
private static final String FUNCTION_WITH_DEPENDENCY_REGEX = "(?i)LANGUAGE\\s+(?i)SQL\\s+";
private static final String[] VIEW_FALSE_POSITIVE_DEPENDENCY_REGEX = {"(?i)WITH\\s+(\\w+)\\s+(?i)as\\s+\\(", "\\)\\s*,\\s*(\\w+)\\s+(?i)as\\s*\\("};
private static final String[] COMMENT_REGEX = {"(?:'[^']*')|--.*$|\\/\\*[\\s\\S]*?\\*\\/|(?i)comment\\s*=\\s*'[^']*'\\s*(?i)(?=as)", ""};
// {"--.*?\\n|\\/\\/.*?\\n", "\\/\\*[\\s\\S]*?\\*\\/", "\\'([^\\']*)\\'"};
// {"((--)|(\\/\\/)).*[\\r\\n]+", "\\/\\*([^*]|[\\r\\n]|(\\*+([^*\\/]|[\\r\\n])))*\\*+\\/"};
private static final String DEPENDENCY_START_REGEX = "([()\\[\\],\\.\\s\\\"])";
private static final String DEPENDENCY_END_REGEX = "([()\\[\\],\\.\\s\\'\\\";])";
private List<Script> scripts = new ArrayList<>();
public DependencyExtractor() {
log.debug("Dependency extractor started.");
}
public Set<String> extractScriptDependenciesOld(Script script) {
if(script.getObjectType() != ScriptObjectType.VIEWS && script.getObjectType() != ScriptObjectType.FUNCTIONS ) {
return new HashSet<>();
}
if(script.getObjectType() == ScriptObjectType.FUNCTIONS && !Pattern.compile(FUNCTION_WITH_DEPENDENCY_REGEX).matcher(script.getContent()).find()) {
return new HashSet<>();
}
Set<String> dependencies = new HashSet<>();
Set<String> falseDependencies = new HashSet<>();
String content = script.getContent();
if(script.getObjectType() == ScriptObjectType.VIEWS) {
for (String regex : COMMENT_REGEX) {
content = content.replaceAll(regex, "");
}
}
for(String regex: VIEW_FALSE_POSITIVE_DEPENDENCY_REGEX) {
Pattern pattern = Pattern.compile(regex, Pattern.MULTILINE);
Matcher matcher = pattern.matcher(content);
while (matcher.find()) {
String objectName = constructFullObjectName(script, matcher.group(1));
falseDependencies.add(objectName);
}
}
for(String regex: VIEW_DEPENDENCY_REGEX) {
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(content);
while (matcher.find()) {
String objectName = constructFullObjectName(script, matcher.group(1));
objectName = objectName.replaceAll("\"", "");
if(!falseDependencies.contains(objectName)) {
dependencies.add(objectName);
}
// String nameTerminator = matcher.group(2);
// if(nameTerminator != null && nameTerminator.endsWith(",")) {
// int end = matcher.end(2);
// String subContent = content.substring(end);
// Matcher crossJoinMather = Pattern.compile(CROSS_JOIN_DEPENDENCY).matcher(subContent);
// }
}
}
log.debug("For the object {} found the following dependencies: {}", script.getObjectName(), dependencies);
return dependencies;
}
private String constructFullObjectName(Script from, String partialName) {
String[] objectHierarchy = partialName.split("\\.");
String fullyQualifiedName = "%s.%s.%s";
if(objectHierarchy.length == 1) {
fullyQualifiedName = String.format("%s.%s.%s", from.getDatabaseName(), from.getSchemaName(), objectHierarchy[0]);
}
else if(objectHierarchy.length == 2) {
fullyQualifiedName = String.format("%s.%s.%s", from.getDatabaseName(), objectHierarchy[0], objectHierarchy[1]);
}
else if(objectHierarchy.length == 3) {
fullyQualifiedName = String.format("%s.%s.%s", objectHierarchy[0], objectHierarchy[1], objectHierarchy[2]);
}
else {
log.error("Unknown dependency extracted: {} from script: {}", partialName, from);
}
return fullyQualifiedName.toUpperCase();
}
public void addScripts(List<? extends Script> scripts) {
this.scripts.addAll(scripts);
}
public Set<Script> extractScriptDependencies(Script script) {
Set<Script> dependencies = new HashSet<>();
dependencies = scripts.parallelStream()
.filter(s -> !s.getFullObjectName().equals(script.getFullObjectName()))
.filter(s -> isDependencyOf(s, script))
.collect(Collectors.toSet());
if(script instanceof MigrationScript) {
MigrationScript migrationScript = (MigrationScript)script;
Set<Script> versionDependencies = scripts.stream()
.filter(s -> s.getFullObjectName().equals(script.getFullObjectName()) && s.getObjectType().equals(script.getObjectType()))
.map(s -> (MigrationScript) s)
.filter(s -> s.getVersion() < migrationScript.getVersion()).collect(Collectors.toSet());
dependencies.addAll(versionDependencies);
}
log.debug("For the object {} found the following dependencies: {}", script.getId(), dependencies);
return dependencies;
}
private boolean isDependencyOf(Script dependency, Script target) {
if(dependency.getObjectName().equals(target.getObjectName())) {
log.debug("Found same object name with different schema: {}, {}", dependency, target);
}
Set<String> fullIdentifiers = SqlTokenizer.getFullIdentifiers(dependency.getObjectName(), target.getContent());
if(fullIdentifiers.isEmpty()) {
return false;
}
for(String identifier: fullIdentifiers) {
String fullObjectName = constructFullObjectName(target, identifier);
if (fullObjectName.equals(dependency.getFullObjectName())) {
return true;
}
}
return false;
}
}
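A sketch of the intended behavior (view definitions are hypothetical; identifier resolution relies on SqlTokenizer as used above):
Script a = ScriptFactory.getStateScript("DB", "CORE", ScriptObjectType.VIEWS, "A",
        "CREATE VIEW DB.CORE.A AS SELECT 1 AS X;");
Script b = ScriptFactory.getStateScript("DB", "CORE", ScriptObjectType.VIEWS, "B",
        "CREATE VIEW DB.CORE.B AS SELECT X FROM DB.CORE.A;");
DependencyExtractor extractor = new DependencyExtractor();
extractor.addScripts(List.of(a, b));
Set<Script> deps = extractor.extractScriptDependencies(b); // expected to contain a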

View File

@@ -0,0 +1,122 @@
package com.snowflake.dlsync.dependency;
import com.snowflake.dlsync.models.Config;
import com.snowflake.dlsync.models.DependencyOverride;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.ScriptDependency;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class DependencyGraph {
private DependencyExtractor dependencyExtractor;
private Config config;
private Map<Script, Set<Script>> dagGraph;
private Map<Script, Integer> inDegree;
private Stack<Script> zeroInDegreeScripts = new Stack<>();
public DependencyGraph(DependencyExtractor dependencyExtractor, Config config) {
this.dependencyExtractor = dependencyExtractor;
this.config = config;
dagGraph = new HashMap<>();
inDegree = new HashMap<>();
}
public void addNodes(List<? extends Script> nodes) {
log.info("Building dependency graph of {} scripts.", nodes.size());
dependencyExtractor.addScripts(nodes);
for(Script script: nodes) {
Set<Script> scriptDependencies = dependencyExtractor.extractScriptDependencies(script);
List<Script> manualOverride = getDependencyOverride(script, nodes);
scriptDependencies.addAll(manualOverride);
for(Script dependency: scriptDependencies) {
dagGraph.computeIfAbsent(dependency, k -> new HashSet<>()).add(script);
}
inDegree.put(script, scriptDependencies.size());
if(scriptDependencies.size() == 0) {
zeroInDegreeScripts.push(script);
}
}
log.debug("Using the following dependency graph: {}", dagGraph);
}
public List<Script> topologicalSort() {
log.info("Sorting scripts based on dependency ...");
List<Script> sortedScript = new ArrayList<>(inDegree.size());
while(!zeroInDegreeScripts.isEmpty()) {
Script currentScript = zeroInDegreeScripts.pop();
sortedScript.add(currentScript);
log.debug("{} script edges {} ", currentScript, dagGraph.get(currentScript));
if(dagGraph.get(currentScript) == null) {
continue;
}
for(Script edge: dagGraph.get(currentScript)) {
int before = inDegree.get(edge);
inDegree.put(edge, inDegree.get(edge) - 1);
if (inDegree.get(edge) == 0) {
zeroInDegreeScripts.push(edge);
}
}
}
if(inDegree.size() != sortedScript.size()) {
log.error("DAG graph Error, input script size({}) is different than sequenced script size({})", inDegree.size(), sortedScript.size());
for(Script script: inDegree.keySet()) {
if(!sortedScript.contains(script)) {
log.warn("Dependencies for {} are: {}", script, dependencyExtractor.extractScriptDependencies(script));
}
}
throw new RuntimeException("Sorting Error, Cyclic dependency detected. sorted script size is missing some scripts.");
}
log.info("Sorted scripts: {}", sortedScript);
return sortedScript;
}
public void printDependencyGraph() {
for(Script node: dagGraph.keySet()) {
System.out.println("Script: " + node.getFullObjectName() + " depends on -> " + dagGraph.get(node).stream().map(s -> s.getFullObjectName()).collect(Collectors.toList()));
}
}
public List<ScriptDependency> getDependencyList() {
Set<ScriptDependency> dependencyList = new HashSet<>();
for(Script dependency: dagGraph.keySet()) {
for(Script node: dagGraph.get(dependency)) {
//Remove self dependency for migration scripts
if(node.getFullObjectName().equals(dependency.getFullObjectName())) {
continue;
}
ScriptDependency scriptDependency = new ScriptDependency(node, dependency);
dependencyList.add(scriptDependency);
}
}
return new ArrayList<>(dependencyList);
}
public Map<Script, Set<Script>> getDagGraph() {
return dagGraph;
}
public List<Script> getDependencyOverride(Script script, List<? extends Script> nodes) {
if(config == null) {
return new ArrayList<>();
}
List<DependencyOverride> overrides = config.getDependencyOverride();
if(overrides == null || overrides.size() == 0) {
return new ArrayList<>();
}
List<Script> scriptsDependencyOverrides = overrides.stream()
.filter(dependencyOverride -> dependencyOverride.getScript().equals(script.getFullObjectName()))
.flatMap(dependencyOverride -> dependencyOverride.getDependencies().stream())
.map(dependencyName -> findScriptByName(nodes, dependencyName))
.collect(Collectors.toList());
return scriptsDependencyOverrides;
}
private Script findScriptByName(List<? extends Script> allScripts, String fullObjectName) {
return allScripts.parallelStream().filter(script -> script.getFullObjectName().equals(fullObjectName)).findFirst().get();
}
}
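topologicalSort above is Kahn's algorithm. A minimal standalone sketch over plain strings, assuming every node appears as a key of the map (dependency-free nodes map to an empty set):
import java.util.*;

public class KahnSketch {
    /** dependsOn maps each node to the set of nodes it depends on. */
    public static List<String> sort(Map<String, Set<String>> dependsOn) {
        Map<String, Integer> inDegree = new HashMap<>();
        Map<String, Set<String>> dependents = new HashMap<>();
        for (Map.Entry<String, Set<String>> e : dependsOn.entrySet()) {
            inDegree.put(e.getKey(), e.getValue().size());
            for (String dep : e.getValue()) {
                dependents.computeIfAbsent(dep, k -> new HashSet<>()).add(e.getKey());
            }
        }
        Deque<String> zero = new ArrayDeque<>();
        inDegree.forEach((node, d) -> { if (d == 0) zero.push(node); });
        List<String> sorted = new ArrayList<>();
        while (!zero.isEmpty()) {
            String node = zero.pop();
            sorted.add(node);
            // Each dependent loses one unmet dependency; once it has none, it is ready.
            for (String dependent : dependents.getOrDefault(node, Set.of())) {
                if (inDegree.merge(dependent, -1, Integer::sum) == 0) zero.push(dependent);
            }
        }
        if (sorted.size() != inDegree.size()) throw new IllegalStateException("Cyclic dependency detected");
        return sorted; // e.g. sort(Map.of("B", Set.of("A"), "A", Set.of())) -> [A, B]
    }
}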

View File

@@ -0,0 +1,567 @@
package com.snowflake.dlsync.doa;
import com.snowflake.dlsync.ScriptFactory;
import com.snowflake.dlsync.Util;
import com.snowflake.dlsync.dependency.DependencyExtractor;
import com.snowflake.dlsync.models.*;
import com.snowflake.dlsync.parser.SqlTokenizer;
import lombok.extern.slf4j.Slf4j;
import java.sql.*;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class ScriptRepo {
private Properties connectionProperties;
private Connection connection;
private Map<String, String> scriptHash = new HashMap<>();
private Long changeSyncId;
public final String CHANGE_SYNC_TABLE_NAME = "DL_SYNC_CHANGE_SYNC";
public final String SCRIPT_HISTORY_TABLE_NAME = "DL_SYNC_SCRIPT_HISTORY";
public final String SCRIPT_EVENT_TABLE_NAME = "DL_SYNC_SCRIPT_EVENT";
public final String DEPENDENCY_LINEAGE_TABLE_NAME = "DL_SYNC_DEPENDENCY_LINEAGE";
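// Control tables: CHANGE_SYNC records one row per run, SCRIPT_HISTORY keeps the latest hash and rollback script per object, SCRIPT_EVENT is an append-only audit log, and DEPENDENCY_LINEAGE stores the extracted dependency edges.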
public ScriptRepo(Properties connectionProperties) {
log.debug("Repo initialized with the following properties: {}", connectionProperties);
this.connectionProperties = connectionProperties;
try {
openConnection();
ResultSet resultSet = connection.createStatement().executeQuery("select current_database(), current_schema();");
resultSet.next();
log.info("Using database [{}] and schema [{}] for dlsync activities.", resultSet.getString(1), resultSet.getString(2));
initScriptTables();
} catch (SQLException e) {
log.error("Error while initializing the script repo: {} cause {}", e.getMessage(), e.getCause());
throw new RuntimeException(e);
}
}
private void openConnection() throws SQLException {
String jdbcUrl = "jdbc:snowflake://" + connectionProperties.getProperty("account") + ".snowflakecomputing.com/";
connectionProperties.remove("account");
log.debug("Connection opened with properties: {}", connectionProperties);
connection = DriverManager.getConnection(jdbcUrl, connectionProperties);
}
private void initScriptTables() throws SQLException {
////varchar OBJECT_NAME, varchar SCRIPT_HASH, varchar created_by, timestamp created_ts, varchar updated_by, timestamp updated_ts;
log.debug("Checking for deployment tables");
try {
String query = "SELECT * FROM " + CHANGE_SYNC_TABLE_NAME + " LIMIT 1;";
Statement statement = connection.createStatement();
statement.executeQuery(query);
updateOldTableNames();
} catch (SQLException e) {
log.info("Running for the first time. Creating required tables.");
String createChangeSyncSql = "CREATE OR REPLACE TABLE " + CHANGE_SYNC_TABLE_NAME + " (ID integer PRIMARY KEY, CHANGE_TYPE varchar, STATUS varchar, LOG varchar, CHANGE_COUNT integer, START_TIME timestamp, END_TIME timestamp);";
String createSqlHash = "CREATE OR REPLACE TABLE " + SCRIPT_HISTORY_TABLE_NAME + " (SCRIPT_ID VARCHAR, OBJECT_NAME varchar, OBJECT_TYPE varchar, ROLLBACK_SCRIPT varchar, SCRIPT_HASH varchar, DEPLOYED_HASH varchar, CHANGE_SYNC_ID integer, CREATED_BY varchar, CREATED_TS timestamp, UPDATED_BY varchar, UPDATED_TS timestamp, FOREIGN KEY (CHANGE_SYNC_ID) REFERENCES " + CHANGE_SYNC_TABLE_NAME + "(ID));";
String createSqlEvent = "CREATE OR REPLACE TABLE " + SCRIPT_EVENT_TABLE_NAME + " (ID VARCHAR, SCRIPT_ID VARCHAR, OBJECT_NAME varchar, SCRIPT_HASH varchar, STATUS varchar, LOG varchar, CHANGE_SYNC_ID integer, CREATED_BY varchar, CREATED_TS timestamp, FOREIGN KEY (CHANGE_SYNC_ID) REFERENCES " + CHANGE_SYNC_TABLE_NAME + "(ID));";
log.debug("create hash table sql: {}", createSqlHash);
log.debug("create event table sql: {}", createSqlEvent);
Statement statement = connection.createStatement();
statement.executeUpdate(createChangeSyncSql);
statement.executeUpdate(createSqlHash);
statement.executeUpdate(createSqlEvent);
}
}
private void updateOldTableNames() {
try {
String query = "SELECT * FROM DL_SYNC_SCRIPT LIMIT 1;";
Statement statement = connection.createStatement();
statement.executeQuery(query);
log.info("Found old dlsync table DL_SYNC_SCRIPT renaming it to [{}]", SCRIPT_HISTORY_TABLE_NAME);
String alterSql = "ALTER TABLE IF EXISTS DL_SYNC_SCRIPT RENAME TO " + SCRIPT_HISTORY_TABLE_NAME + ";";
statement.executeUpdate(alterSql);
} catch (SQLException e) {
log.debug("All tables are with new version");
}
}
public Set<String> loadScriptHash() throws SQLException {
String hashQuery = "SELECT * FROM " + SCRIPT_HISTORY_TABLE_NAME + ";";
log.debug("Loading hash with sql: {}", hashQuery);
Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(hashQuery);
while (resultSet.next()) {
String id = resultSet.getString("SCRIPT_ID");
scriptHash.put(id, resultSet.getString("SCRIPT_HASH"));
}
log.debug("Script hash loaded: {}", scriptHash);
return scriptHash.keySet();
}
public Set<String> loadDeployedHash() throws SQLException {
String hashColumn = "DEPLOYED_HASH";
String hashQuery = "SELECT * FROM " + SCRIPT_HISTORY_TABLE_NAME + ";";
log.debug("Loading hash with sql: {}", hashQuery);
Statement statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(hashQuery);
while (resultSet.next()) {
String id = resultSet.getString("SCRIPT_ID");
scriptHash.put(id, resultSet.getString(hashColumn));
}
log.debug("Script deployed hash loaded: {}", scriptHash);
return scriptHash.keySet();
}
public Long insertChangeSync(ChangeType changeType, Status status, String logMessage) throws SQLException {
String queryGetId = "SELECT count(1) FROM " + CHANGE_SYNC_TABLE_NAME + ";";
ResultSet rs = connection.createStatement().executeQuery(queryGetId);
if(rs.next()) {
changeSyncId = rs.getLong(1) + 1;
}
String insertSql = "INSERT INTO " + CHANGE_SYNC_TABLE_NAME + " (ID, CHANGE_TYPE, STATUS, LOG, START_TIME) VALUES(?, ?, ?, ?, CURRENT_TIMESTAMP);";
PreparedStatement statement = connection.prepareStatement(insertSql);
statement.setLong(1, changeSyncId);
statement.setString(2, changeType.toString());
statement.setString(3, status.toString());
statement.setString(4, logMessage);
log.debug("Creating script event with the following SQL: {}", insertSql);
statement.executeUpdate();
return changeSyncId;
}
public void updateChangeSync(ChangeType changeType, Status status, String logMessage, Long changeCount) throws SQLException {
String updateSql = "UPDATE " + CHANGE_SYNC_TABLE_NAME + " SET CHANGE_TYPE=?, STATUS=?, LOG=?, CHANGE_COUNT=?, END_TIME=CURRENT_TIMESTAMP WHERE ID = ? ;";
PreparedStatement statement = connection.prepareStatement(updateSql);
statement.setString(1, changeType.toString());
statement.setString(2, status.toString());
statement.setString(3, logMessage);
statement.setObject(4, changeCount);
statement.setLong(5, changeSyncId);
log.debug("Creating script event with the following SQL: {}", updateSql);
statement.executeUpdate();
}
private boolean updateScriptHash(Script script) throws SQLException {
String rollback = null;
if(script instanceof MigrationScript) {
MigrationScript migrationScript = (MigrationScript)script;
rollback = migrationScript.getRollback();
}
PreparedStatement statement;
String deployedHash = Util.getMd5Hash(script.getContent());
log.debug("Updating script hash of object {}", script.getId());
if(scriptHash.containsKey(script.getId())) {
String updateSql = "UPDATE " + SCRIPT_HISTORY_TABLE_NAME + " SET ROLLBACK_SCRIPT=?, SCRIPT_HASH=?, DEPLOYED_HASH=?, CHANGE_SYNC_ID=?, updated_by=current_user, updated_ts=current_timestamp WHERE SCRIPT_ID=?;";
statement = connection.prepareStatement(updateSql);
statement.setString(1, rollback);
statement.setString(2, script.getHash());
statement.setString(3, deployedHash);
statement.setLong(4, changeSyncId);
statement.setString(5, script.getId());
log.debug("Updating script hash with the following SQL: {}", updateSql);
}
else {
String insertSql = "INSERT INTO " + SCRIPT_HISTORY_TABLE_NAME + " VALUES(?, ?, ?, ?, ?, ?, ?, current_user, current_timestamp, current_user, current_timestamp);";
statement = connection.prepareStatement(insertSql);
statement.setString(1, script.getId());
statement.setString(2, script.getFullObjectName());
statement.setString(3, script.getObjectType().toString());
statement.setString(4, rollback);
statement.setString(5, script.getHash());
statement.setString(6, deployedHash);
statement.setLong(7, changeSyncId);
log.debug("Updating script hash with the following SQL: {}", insertSql);
}
return statement.executeUpdate() >= 0;
}
private boolean insertScriptEvent(Script script, String status, String logs) throws SQLException {
//varchar ID, varchar OBJECT_NAME, varchar SCRIPT_HASH, varchar STATUS, varchar log, varchar created_by, varchar created_ts;
log.debug("Creating event for the object {} with status: {} and log: {} ", script.getObjectName(), status, logs);
String insertSql = "INSERT INTO " + SCRIPT_EVENT_TABLE_NAME + " SELECT UUID_STRING(), ?, ?, ?, ?, ?, ?, current_user, current_timestamp;";
PreparedStatement statement = connection.prepareStatement(insertSql);
statement.setString(1, script.getId());
statement.setObject(2, script.getFullObjectName());
statement.setString(3, script.getHash());
statement.setString(4, status);
statement.setString(5, logs);
statement.setLong(6, changeSyncId);
log.debug("Creating script event with the following SQL: {}", insertSql);
return statement.executeUpdate() > 0;
}
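// A script counts as changed when its current content hash differs from the hash recorded at the last deployment.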
public boolean isScriptChanged(Script script) {
// return true;
return !scriptHash.getOrDefault(script.getId(), "null").equals(script.getHash());
}
public boolean isScriptVersionDeployed(Script script) {
return scriptHash.containsKey(script.getId());
}
public void createScriptObject(Script script, boolean onlyHashes) throws SQLException {
Statement statement = connection.createStatement();
boolean autoCommit = connection.getAutoCommit();
try {
connection.setAutoCommit(false);
if(!onlyHashes) {
statement.executeUpdate(script.getContent());
log.debug("Creating object using the SQL: {}", script.getContent());
}
updateScriptHash(script);
insertScriptEvent(script, "SUCCESS", "Successfully Deployed Object");
connection.commit();
log.info("Successfully Deployed object: {}", script);
}
catch (SQLException e) {
connection.rollback();
log.error("Error {}, while creating the object {} with sql {}", e.getMessage(), script.getObjectName(), script.getContent());
insertScriptEvent(script, "ERROR", e.getMessage());
throw e;
}
finally {
connection.setAutoCommit(autoCommit);
}
}
public List<Script> getScriptsInSchema(String schema) throws SQLException {
List<Script> scripts = new ArrayList<>();
for(ScriptObjectType type: ScriptObjectType.values()) {
scripts.addAll(getScriptsInSchema(schema, type));
}
return scripts;
}
public List<Script> getAllScriptsInSchema(String schema) throws SQLException {
log.info("Getting all scripts in schema: {}", schema);
String sql = String.format("SELECT GET_DDL('SCHEMA', '%s', true)", schema);
log.debug("Getting all scripts using SQL: {}", sql);
ResultSet resultSet = connection.createStatement().executeQuery(sql);
if(resultSet.next()) {
String ddl = resultSet.getString(1);
return SqlTokenizer.parseDdlScripts(ddl, getDatabaseName(), schema);
}
else {
throw new RuntimeException("Error while getting ddl scripts: result set has no data");
}
}
public List<Script> getStateScriptsInSchema(String schema) throws SQLException {
List<Script> scripts = new ArrayList<>();
for(ScriptObjectType type: ScriptObjectType.values()) {
if(!type.isMigration()) {
scripts.addAll(getScriptsInSchema(schema, type));
}
}
return scripts;
}
public List<Script> getScriptsInSchema(String schema, ScriptObjectType type) throws SQLException {
log.debug("Getting {} type scripts in schema: {}",type, schema);
String sql = "";
if(type == ScriptObjectType.FUNCTIONS || type == ScriptObjectType.PROCEDURES) {
sql = String.format("SELECT %s_NAME, ARGUMENT_SIGNATURE FROM INFORMATION_SCHEMA.%s WHERE %s_SCHEMA = '%s'",type.getEscapedSingular(), type, type.getEscapedSingular(), schema.toUpperCase());
}
else if(type == ScriptObjectType.STREAMS || type == ScriptObjectType.TASKS || type == ScriptObjectType.STAGES) {
return new ArrayList<>();
}
else if(type == ScriptObjectType.VIEWS) {
sql = String.format("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_SCHEMA = '%s'", schema.toUpperCase());
}
else if(type == ScriptObjectType.TABLES) {
sql = String.format("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE <> 'VIEW' AND TABLE_SCHEMA = '%s'", schema.toUpperCase());
}
else {
sql = String.format("SELECT %s_NAME FROM INFORMATION_SCHEMA.%s WHERE %s_SCHEMA = '%s'",type.getEscapedSingular(), type, type.getEscapedSingular(), schema.toUpperCase());
}
log.debug("Getting all scripts using SQL: {}", sql);
List<Script> scripts = new ArrayList<>();
ResultSet resultSet = connection.createStatement().executeQuery(sql);
while (resultSet.next()) {
String ddlSql = "";
String scriptObjectName = resultSet.getString(1);
if(type == ScriptObjectType.FUNCTIONS || type == ScriptObjectType.PROCEDURES) {
String arguments = resultSet.getString(2);
String regex = "(\\(|\\,\\s)\\w+";
arguments = arguments.replaceAll(regex, "$1");
ddlSql = String.format("SELECT GET_DDL('%s', '%s.%s%s', true);",type.getEscapedSingular(), schema.toUpperCase(), scriptObjectName.toUpperCase(), arguments);
}
else {
ddlSql = String.format("SELECT GET_DDL('%s', '%s.%s', true);",type.getEscapedSingular(), schema.toUpperCase(), scriptObjectName.toUpperCase());
}
log.debug("Get ddl script: {}", ddlSql);
ResultSet ddlResultSet = connection.createStatement().executeQuery(ddlSql);
ddlResultSet.next();
String content = ddlResultSet.getString(1);
if (content == null) {
log.warn("Unable to read Script definition for {}", scriptObjectName);
continue;
}
if(type.isMigration()) {
// String migrationHeader = "---version: 0, author: DlSync\n";
// String rollback = String.format("\n---rollback: DROP %s IF EXISTS %s;", type.getSingular(), getDatabaseName() + "." + schema + "." + scriptObjectName);
// String verify = String.format("\n---verify: SHOW %s LIKE %s;",type, getDatabaseName() + "." + schema + "." + scriptObjectName);
// content = migrationHeader + content + rollback + verify;
// Script script = ScriptFactory.getScript(getDatabaseName(), schema, type, scriptObjectName, content, 0L, null, null, null);
MigrationScript script = ScriptFactory.getMigrationScript(getDatabaseName(), schema, type, scriptObjectName, content);
scripts.add(script);
}
else {
Script script = ScriptFactory.getStateScript(getDatabaseName(), schema, type, scriptObjectName, content);
scripts.add(script);
}
}
return scripts;
}
public Script addConfig(Script script) throws SQLException {
if(script.getObjectType() == ScriptObjectType.TABLES) {
String additionalContent = String.format("SELECT * FROM %s", script.getFullObjectName());
ResultSet resultSet = connection.createStatement().executeQuery(additionalContent);
int count = resultSet.getMetaData().getColumnCount();
StringBuilder insertBuilder = new StringBuilder(String.format("INSERT INTO %s values", script.getFullObjectName()));
boolean firstRow = true;
while(resultSet.next()) {
if(!firstRow) {
insertBuilder.append(", ");
}
firstRow = false;
for(int i = 1; i <= count; i++) {
if(i == 1) {
insertBuilder.append("(");
}
else {
insertBuilder.append(", ");
}
Object value = resultSet.getObject(i);
if(value == null) {
insertBuilder.append("null");
}
else {
insertBuilder.append(String.format("'%s'", value.toString().replace("'", "''")));
}
}
insertBuilder.append(")");
}
insertBuilder.append(";");
String migrationHeader = "---version: 1, author: DlSync\n";
String rollback = "\n---rollback: DELETE FROM " + script.getFullObjectName() + ";";
String verify = "\n---verify: SELECT COUNT(*) FROM " + script.getFullObjectName() + ";";
String insertContent = migrationHeader + insertBuilder + rollback + verify;
String newContent = script.getContent() + "\n\n" + insertContent;
script.setContent(newContent);
}
return script;
}
// public List<Script> getScriptsInSchemaWithArguments(String schema, ScriptObjectType type) throws SQLException {
// String sql = String.format("SELECT %s_NAME, ARGUMENT_SIGNATURE FROM INFORMATION_SCHEMA.%s WHERE %s_SCHEMA = '%s'",type.getSingular(), type, type.getSingular(), schema.toUpperCase());
// log.debug("Getting all scripts using SQL: {}", sql);
// List<Script> scripts = new ArrayList<>();
// ResultSet resultSet = connection.createStatement().executeQuery(sql);
// while (resultSet.next()) {
// String scriptObjectName = resultSet.getString(1);
// String arguments = resultSet.getString(2);
// String regex = "(\\(|\\,\\s)\\w+";
// arguments = arguments.replaceAll(regex, "$1");
// String ddlSql = String.format("SELECT GET_DDL('%s', '%s.%s%s', true);",type.getSingular(), schema.toUpperCase(), scriptObjectName.toUpperCase(), arguments);
// log.info("Get ddl script: {}", ddlSql);
// ResultSet ddlResultSet = connection.createStatement().executeQuery(ddlSql);
// ddlResultSet.next();
// String content = ddlResultSet.getString(1);
// if (content == null) {
// log.warn("Unable to read Script definition for {}", scriptObjectName);
// continue;
// }
// Script script = new Script(getDatabaseName(), schema, type, scriptObjectName, content);
// scripts.add(script);
// }
// return scripts;
// }
public List<Script> getAllScriptsInDatabase() throws SQLException {
log.info("Getting all scripts for database: {}", getDatabaseName());
List<String> schemas = getAllSchemasInDatabase(getDatabaseName());
List<Script> scripts = new ArrayList<>();
for(String schema: schemas) {
// for(ScriptObjectType scriptObjectType: ScriptObjectType.values()) {
// scripts.addAll(getScriptsInSchema(schema, scriptObjectType));
// }
scripts.addAll(getScriptsInSchema(schema, ScriptObjectType.VIEWS));
}
return scripts;
}
public List<String> getAllSchemasInDatabase(String database) throws SQLException {
log.info("Reading all schemas in database: {}", database);
String query = String.format("SELECT * FROM %s.INFORMATION_SCHEMA.SCHEMATA", database);
log.debug("Reading schemas using sql: {}", query);
ResultSet resultSet = executeQuery(query);
List<String> schemas = new ArrayList<>();
while(resultSet.next()) {
String schema = resultSet.getString("SCHEMA_NAME");
if(schema.equalsIgnoreCase("INFORMATION_SCHEMA") || schema.equalsIgnoreCase("PUBLIC")) {
continue;
}
schemas.add(schema);
}
return schemas;
}
public String getDatabaseName() {
return connectionProperties.getProperty("db");
}
public String getSchemaName() {
return connectionProperties.getProperty("schema");
}
private ResultSet executeQuery(String query) throws SQLException {
return connection.createStatement().executeQuery(query);
}
public void insertDependencyList(List<ScriptDependency> dependencyList) throws SQLException {
String createTable = "CREATE TABLE IF NOT EXISTS " + DEPENDENCY_LINEAGE_TABLE_NAME + "(OBJECT_NAME VARCHAR, OBJECT_TYPE VARCHAR, DEPENDENCY VARCHAR, DEPENDECY_OBEJECT_TYPE VARCHAR, CHANGE_SYNC_ID VARCHAR, CREATED_BY VARCHAR, CREATED_TS TIMESTAMP);";
connection.createStatement().executeUpdate(createTable);
StringBuilder insertSql = new StringBuilder("INSERT INTO " + DEPENDENCY_LINEAGE_TABLE_NAME + " VALUES ");
for(ScriptDependency dependency: dependencyList) {
String values = String.format("('%s', '%s', '%s', '%s', '%s', current_user, current_timestamp),", dependency.getObjectName(), dependency.getObjectType(), dependency.getDependency(), dependency.getDependencyObjectType(), changeSyncId);
insertSql.append(values);
}
insertSql.deleteCharAt(insertSql.length() - 1);
log.info("inserting dag scripts using {} ", insertSql);
connection.createStatement().executeUpdate(insertSql.toString());
}
public void insertSortedScript(List<Script> sequencedScript) throws SQLException {
DependencyExtractor dependencyExtractor = new DependencyExtractor();
String createTable = "CREATE OR REPLACE TABLE DL_SYNC_TOPOLOGICAL_SORTED(id INT, script VARCHAR, dependency_size INT, dependencies VARCHAR);";
connection.createStatement().executeUpdate(createTable);
StringBuilder insertSql = new StringBuilder("INSERT INTO DL_SYNC_TOPOLOGICAL_SORTED VALUES ");
for(int i = 0; i < sequencedScript.size(); i++) {
Script script = sequencedScript.get(i);
Set<String> dependencies = dependencyExtractor.extractScriptDependencies(script).stream().map(s -> s.getFullObjectName()).collect(Collectors.toSet());
if(dependencies.size() == 0) {
log.debug("Found zero dependency for {}", script.getFullObjectName());
}
String values = String.format("(%s, '%s', %s, '%s'),", i, script.getFullObjectName(), dependencies.size(), dependencies.toString());
insertSql.append(values);
}
insertSql.deleteCharAt(insertSql.length() - 1);
log.info("inserting sorted scripts using {} ", insertSql.toString());
connection.createStatement().executeUpdate(insertSql.toString());
}
public List<MigrationScript> getMigrationScripts(Set<String> ids) throws SQLException {
if(ids.size() == 0) {
return new ArrayList<>();
}
String allIdJoined = ids.stream().map(v -> "'" + v + "'").collect(Collectors.joining(",", "(", ");"));
String query = "SELECT * FROM " + SCRIPT_HISTORY_TABLE_NAME + " where SCRIPT_ID in " + allIdJoined;
PreparedStatement statement = connection.prepareStatement(query);
ResultSet rs = statement.executeQuery();
List<MigrationScript> migrations = new ArrayList<>();
while(rs.next()) {
ScriptObjectType objectType = ScriptObjectType.valueOf(rs.getString("OBJECT_TYPE"));
if(!objectType.isMigration()) {
continue;
}
String id = rs.getString("SCRIPT_ID");
String[] idSplit = id.split(":");
Long version = null;
if(idSplit.length > 1) {
version = Long.parseLong(idSplit[1]);
}
String fullObjectName = rs.getString("OBJECT_NAME");
String rollback = rs.getString("ROLLBACK_SCRIPT");
MigrationScript migrationScript = ScriptFactory.getMigrationScript(fullObjectName, objectType, "", version, null, rollback, null);
migrations.add(migrationScript);
}
return migrations;
}
public void executeRollback(MigrationScript migrationScript) throws SQLException {
Statement statement = connection.createStatement();
boolean autoCommit = connection.getAutoCommit();
try {
connection.setAutoCommit(false);
if(migrationScript.getRollback() != null && !migrationScript.getRollback().trim().equals("")) {
log.debug("Executing rollback using the SQL: {}", migrationScript.getRollback());
statement.executeUpdate(migrationScript.getRollback());
}
deleteScriptHash(migrationScript);
insertScriptEvent(migrationScript, "SUCCESS", "Successfully Rolled-back Object");
connection.commit();
log.info("Successfully Rollback object: {}", migrationScript);
}
catch (SQLException e) {
connection.rollback();
log.error("Error {}, while rollback the object {} with sql {}", e.getMessage(), migrationScript.getObjectName(), migrationScript.getRollback());
insertScriptEvent(migrationScript, "ERROR", e.getMessage());
throw e;
}
finally {
connection.setAutoCommit(autoCommit);
}
}
public boolean executeVerify(MigrationScript migrationScript) throws SQLException {
Statement statement = connection.createStatement();
boolean autoCommit = connection.getAutoCommit();
try {
connection.setAutoCommit(false);
if(migrationScript.getVerify() != null && !migrationScript.getVerify().trim().equals("")) {
log.debug("Executing verify using the SQL: {}", migrationScript.getVerify());
statement.executeQuery(migrationScript.getVerify());
}
insertScriptEvent(migrationScript, "SUCCESS", "Successfully Verified Object");
connection.commit();
log.debug("Successfully Verified object: {}", migrationScript);
return true;
}
catch (SQLException e) {
connection.rollback();
log.error("Error {}, while verifying the object {} with sql {}", e.getMessage(), migrationScript.getObjectName(), migrationScript.getVerify());
insertScriptEvent(migrationScript, "ERROR", e.getMessage());
return false;
}
finally {
connection.setAutoCommit(autoCommit);
}
}
private void deleteScriptHash(MigrationScript migration) throws SQLException {
String deleteSql = "DELETE FROM " + SCRIPT_HISTORY_TABLE_NAME + " WHERE SCRIPT_ID=?;";
PreparedStatement statement = connection.prepareStatement(deleteSql);
statement.setString(1, migration.getId());
statement.executeUpdate();
}
public boolean verifyScript(Script script) {
if(!scriptHash.containsKey(script.getId())) {
log.warn("Script file does not exist for the db object: {}", script);
}
return scriptHash.getOrDefault(script.getId(), script.getHash()).equals(script.getHash());
}
public boolean compareScript(Script script1, Script script2) {
return SqlTokenizer.compareScripts(script1, script2);
}
}
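A hedged connection sketch (all property values are placeholders):
Properties props = new Properties();
props.setProperty("account", "myorg-myaccount"); // consumed when building the JDBC URL
props.setProperty("user", "DLSYNC_USER");
props.setProperty("password", "...");
props.setProperty("db", "ANALYTICS_DB");
props.setProperty("schema", "DLSYNC");
ScriptRepo repo = new ScriptRepo(props); // opens the connection and creates the DL_SYNC_* tables on first run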

View File

@@ -0,0 +1,198 @@
package com.snowflake.dlsync.doa;
import com.snowflake.dlsync.ScriptFactory;
import com.snowflake.dlsync.models.*;
import com.snowflake.dlsync.parser.SqlTokenizer;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@Slf4j
public class ScriptSource {
private String scriptRoot;
private String mainScriptDir;
private String testScriptDir;
public ScriptSource(String scriptRoot) {
this.scriptRoot = scriptRoot;
mainScriptDir = Files.exists(Path.of(scriptRoot, "main")) ? Path.of(scriptRoot, "main").toString(): scriptRoot;
testScriptDir = Path.of(scriptRoot, "tests").toString();
log.debug("Script file reader initialized with scriptRoot: {}", scriptRoot);
}
private List<String> readDatabase() {
File scriptFiles = new File(mainScriptDir);
List<String> dbs = new ArrayList<>();
if(scriptFiles.exists()) {
File[] allDbs = scriptFiles.listFiles();
for(File file: allDbs) {
if(file.isDirectory()) {
dbs.add(file.getName());
}
}
}
else {
log.error("Invalid path for script provided: {}", scriptFiles.getAbsolutePath());
throw new RuntimeException("No valid script source path provided");
}
return dbs;
}
private List<String> readSchemas(String database) {
log.info("Reading all schema from database {}", database);
List<String> schemas = new ArrayList<>();
File dbFile = Path.of(mainScriptDir, database).toFile();
if(dbFile.exists()) {
File[] allFiles = dbFile.listFiles();
for(File file: allFiles) {
if(file.isDirectory()) {
schemas.add(file.getName());
}
}
}
return schemas;
}
public List<Script> getAllScripts() throws IOException {
List<Script> allScripts = new ArrayList<>();
for(String database: readDatabase()) {
for(String schema: readSchemas(database)) {
allScripts.addAll(getScriptsInSchema(database, schema));
}
}
return allScripts;
}
public List<Script> getScriptsInSchema(String database, String schema) throws IOException {
log.info("Reading script files from schema: {}", schema);
List<Script> scripts = new ArrayList<>();
File schemaDirectory = Path.of(mainScriptDir, database, schema).toFile();
File[] scriptTypeDirectories = schemaDirectory.listFiles();
for(File scriptType: scriptTypeDirectories) {
if(scriptType.isDirectory() ) {
File[] scriptFiles = scriptType.listFiles();
for(File file: scriptFiles) {
if(file.getName().toLowerCase().endsWith(".sql")){
Set<Script> scriptsFromFile = buildScriptFromFile(file, scriptType);
scripts.addAll(scriptsFromFile);
}
else {
log.warn("Script Skipped, File not SQL: {} ", file.getName());
}
}
}
else {
log.warn("Script file found outside object type directory: {} ", scriptType.getName());
}
}
return scripts;
}
public Set<Script> buildScriptFromFile(File file, File scriptType) throws IOException {
String content = Files.readString(file.toPath());
String objectName = extractObjectName(file.getName(), content);
ScriptObjectType objectType = extractObjectType(scriptType.getName());
String fullIdentifier = SqlTokenizer.getFirstFullIdentifier(objectName, content);
if(fullIdentifier == null || fullIdentifier.isEmpty()) {
log.error("Error reading script: {}, name and content mismatch", file.getName());
throw new RuntimeException("Object name and file name must match!");
}
String database = extractDatabaseName(fullIdentifier);
String schema = extractSchemaName(fullIdentifier);
Set<Script> scripts = new HashSet<>();
if(objectType.isMigration()) {
List<Migration> migrations = SqlTokenizer.parseMigrationScripts(content);
for(Migration migration: migrations) {
MigrationScript script = ScriptFactory.getMigrationScript(database, schema, objectType, objectName, migration);
// Script script = new Script(database, schema, objectType, objectName, migration.getContent(), migration.getVersion(), migration.getAuthor(), migration.getRollback());
if(scripts.contains(script)) {
log.error("Duplicate version {} for script {} found.", script.getVersion(), script);
throw new RuntimeException("Duplicate version number is not allowed in the same script file.");
}
scripts.add(script);
}
}
else {
Script script = ScriptFactory.getStateScript(database, schema, objectType, objectName, content);
// Script script = new Script(database, schema, objectType, objectName, content);
scripts.add(script);
}
return scripts;
}
public Script getScriptByName(String database, String schema, ScriptObjectType type, String objectName) throws IOException {
File file = Path.of(mainScriptDir, database, schema, type.toString(), objectName + ".SQL").toFile();
String content = Files.readString(file.toPath());
Script script = ScriptFactory.getStateScript(database, schema, type, objectName, content);
// Script script = new Script(database, schema, type, objectName, content);
return script;
}
public void createScriptFiles(List<Script> scripts) {
log.debug("Creating script files for the scripts: {}", scripts);
for(Script script: scripts) {
createScriptFile(script);
}
}
public void createScriptFile(Script script) {
try {
String scriptFileName = script.getObjectName() + ".SQL";
String scriptDirectoryPath = String.format("%s/%s/%s/%s", mainScriptDir, script.getDatabaseName(), script.getSchemaName(), script.getObjectType());
File directory = new File(scriptDirectoryPath);
directory.mkdirs();
FileWriter fileWriter = new FileWriter(Path.of(scriptDirectoryPath, scriptFileName).toFile());
fileWriter.write(script.getContent());
fileWriter.close();
log.debug("File {} created successfully", Path.of(scriptDirectoryPath, scriptFileName));
} catch (IOException e) {
log.error("Error in creating script: {}", e.getMessage());
throw new RuntimeException(e);
}
}
private ScriptObjectType extractObjectType(String objectType) {
return ScriptObjectType.valueOf(objectType);
}
private String extractObjectName(String fileName, String content) {
return fileName.split("\\.")[0].toUpperCase();
}
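// Splits a qualified identifier into its parts. Examples (illustrative):
//   "DB1.SCHEMA1.OBJ" -> database "DB1", schema "SCHEMA1"
//   "SCHEMA1.OBJ"     -> database null, schema "SCHEMA1"
//   "OBJ"             -> database null, schema null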
private String extractDatabaseName(String fullIdentifier) {
String[] names = fullIdentifier.split("\\.");
if(names.length < 3) {
return null;
}
return names[0];
}
private String extractSchemaName(String fullIdentifier) {
String[] names = fullIdentifier.split("\\.");
if(names.length == 3) {
return names[1];
}
else if(names.length == 2) {
return names[0];
}
return null;
}
private Script getScriptByName(List<Script> allScripts, String fullObjectName) {
return allScripts.parallelStream().filter(script -> script.getFullObjectName().equals(fullObjectName)).findFirst().get();
}
}

View File

@@ -0,0 +1,13 @@
package com.snowflake.dlsync.models;
import java.sql.Timestamp;
public class ChangeSync {
private Long id;
private ChangeType changeType;
private Status status;
private String log;
private Long changeCount;
private Timestamp startTime;
private Timestamp endTime;
}

View File

@@ -0,0 +1,5 @@
package com.snowflake.dlsync.models;
public enum ChangeType {
DEPLOY, VERIFY, ROLLBACK, CREATE_SCRIPT, CREATE_LINEAGE
};

View File

@@ -0,0 +1,20 @@
package com.snowflake.dlsync.models;
import lombok.Data;
import java.util.List;
@Data
public class Config {
private String version;
private List<String> scriptExclusion;
private List<DependencyOverride> dependencyOverride;
private List<String> configTables;
public boolean isScriptExcluded(Script script) {
if(scriptExclusion == null) {
return false;
}
return scriptExclusion.contains(script.getFullObjectName());
}
}

View File

@@ -0,0 +1,11 @@
package com.snowflake.dlsync.models;
import lombok.Data;
import java.util.List;
@Data
public class DependencyOverride {
private String script;
private List<String> dependencies;
}

View File

@@ -0,0 +1,15 @@
package com.snowflake.dlsync.models;
import lombok.Builder;
import lombok.Data;
@Data
@Builder
public class Migration {
private String objectName;
private Long version;
private String author;
private String content;
private String rollback;
private String verify;
}

View File

@@ -0,0 +1,54 @@
package com.snowflake.dlsync.models;
public class MigrationScript extends Script {
private Long version;
private String author;
private String rollback;
private String verify;
public MigrationScript(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content, Long version, String author, String rollback, String verify) {
super(databaseName, schemaName, objectName, objectType, content);
this.version = version;
this.author = author;
this.rollback = rollback;
this.verify = verify;
}
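// The id of a migration combines the fully qualified object name with its
// version, e.g. "DB.SCHEMA.TABLE1:2" (illustrative), so every versioned
// change is tracked as a separate entry.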
@Override
public String getId() {
return String.format("%s:%s", getFullObjectName(), version);
}
public Long getVersion() {
return version;
}
public void setVersion(Long version) {
this.version = version;
}
public String getAuthor() {
return author;
}
public void setAuthor(String author) {
this.author = author;
}
public String getRollback() {
return rollback;
}
public void setRollback(String rollback) {
this.rollback = rollback;
}
public String getVerify() {
return verify;
}
public void setVerify(String verify) {
this.verify = verify;
}
}

View File

@@ -0,0 +1,99 @@
package com.snowflake.dlsync.models;
import com.snowflake.dlsync.Util;
import lombok.extern.slf4j.Slf4j;
import java.util.Objects;
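// Base class for a deployable SQL object: names are normalized to upper case,
// the trimmed content is hashed with Util.getMd5Hash so changes can be
// detected by hash comparison, and equality is based on the subclass-defined
// id together with the object type.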
@Slf4j
public abstract class Script {
private String databaseName;
private String schemaName;
private String objectName;
private ScriptObjectType objectType;
private String content;
private String hash;
public Script(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content) {
this.databaseName = databaseName.toUpperCase();
this.schemaName = schemaName.toUpperCase();
this.objectName = objectName.toUpperCase();
this.objectType = objectType;
this.content = content.trim();
this.hash = Util.getMd5Hash(this.content);
}
public String getDatabaseName() {
return databaseName;
}
public void setDatabaseName(String databaseName) {
this.databaseName = databaseName.toUpperCase();
}
public String getSchemaName() {
return schemaName;
}
public void setSchemaName(String schemaName) {
this.schemaName = schemaName.toUpperCase();
}
public String getObjectName() {
return objectName;
}
public void setObjectName(String objectName) {
this.objectName = objectName.toUpperCase();
}
public ScriptObjectType getObjectType() {
return objectType;
}
public void setObjectType(ScriptObjectType objectType) {
this.objectType = objectType;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content.trim();
}
public String getHash() {
return hash;
}
public void setHash(String hash) {
this.hash = hash;
}
public String getFullObjectName() {
return String.format("%s.%s.%s", databaseName, schemaName, objectName);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Script script = (Script) o;
return Objects.equals(getObjectType(), script.getObjectType()) && Objects.equals(getId(), script.getId());
}
@Override
public int hashCode() {
return Objects.hash(getId());
}
@Override
public String toString() {
return getId();
}
public abstract String getId();
}

View File

@@ -0,0 +1,36 @@
package com.snowflake.dlsync.models;
import lombok.Data;
import java.sql.Date;
import java.util.Objects;
@Data
public class ScriptDependency {
private String objectName;
private ScriptObjectType objectType;
private String dependency;
private ScriptObjectType dependencyObjectType;
private String createdBy;
private Date createdTs;
public ScriptDependency(Script node, Script dependency) {
this.objectName = node.getObjectName();
this.objectType = node.getObjectType();
this.dependency = dependency.getObjectName();
this.dependencyObjectType = dependency.getObjectType();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ScriptDependency scriptDependency = (ScriptDependency) o;
return Objects.equals(getObjectName(), scriptDependency.getObjectName()) && Objects.equals(getDependency(), scriptDependency.getDependency());
}
@Override
public int hashCode() {
return Objects.hash(getObjectName(), getDependency());
}
}

View File

@@ -0,0 +1,15 @@
package com.snowflake.dlsync.models;
import java.sql.Date;
public class ScriptEvent {
private String id;
private String scriptId;
private String objectName;
private String scriptHash;
private String status;
private String log;
private Long changeSyncId;
private String createdBy;
private Date createdTs;
}

View File

@@ -0,0 +1,21 @@
package com.snowflake.dlsync.models;
import lombok.Data;
import java.sql.Date;
@Data
public class ScriptHistory {
private String scriptId;
private String objectName;
private String objectType;
private String rollbackScript;
private String scriptHash;
private String deployedHash;
private Long changeSyncId;
private String createdBy;
private Date createdTs;
private String updatedBy;
private Date updatedTs;
}

View File

@@ -0,0 +1,28 @@
package com.snowflake.dlsync.models;
public enum ScriptObjectType {
VIEWS("VIEW"),FUNCTIONS("FUNCTION"),PROCEDURES("PROCEDURE"),FILE_FORMATS("FILE FORMAT"),TABLES("TABLE"),STREAMS("STREAM"),SEQUENCES("SEQUENCE"),STAGES("STAGE"),TASKS("TASK");
private final String singular;
private ScriptObjectType(String type) {
this.singular = type;
}
public String getSingular() {
return singular;
}
public String getEscapedSingular() {
return singular.replace(" ", "_");
}
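// Types that carry state (tables, streams, sequences, stages, tasks) are
// deployed as versioned migration scripts; the remaining types (views,
// functions, procedures, file formats) are state scripts that can be
// re-created in place.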
public boolean isMigration() {
switch (this) {
case TABLES:
case STREAMS:
case SEQUENCES:
case STAGES:
case TASKS:
return true;
default:
return false;
}
}
}

View File

@@ -0,0 +1,14 @@
package com.snowflake.dlsync.models;
public class StateScript extends Script {
public StateScript(String databaseName, String schemaName, String objectName, ScriptObjectType objectType, String content) {
super(databaseName, schemaName, objectName, objectType, content);
}
@Override
public String getId() {
return getFullObjectName();
}
}

View File

@@ -0,0 +1,5 @@
package com.snowflake.dlsync.models;
public enum Status {
SUCCESS, IN_PROGRESS, ERROR
}

View File

@@ -0,0 +1,114 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.models.MigrationScript;
import com.snowflake.dlsync.models.Script;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
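// Replaces ${parameter} placeholders in script content with values from a
// Properties object, and can also perform the reverse (parametrization).
// A minimal usage sketch with assumed values:
//   Properties props = new Properties();
//   props.setProperty("db", "DEV_DB");
//   ParameterInjector injector = new ParameterInjector(props);
//   injector.injectParameters(script); // "${db}.S1.T1" -> "DEV_DB.S1.T1"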
@Slf4j
public class ParameterInjector {
private static final String PARAMETER_FORMAT = "${%s}";
private static final String PARAMETRIZATION_START_REGEX = "([(),\\.\\s\\'\\\"@])";
private static final String PARAMETRIZATION_END_REGEX = "([(),;\\.\\s\\'\\\"])";
private Properties parameters;
public ParameterInjector(Properties parameters) {
log.debug("Parameter injector initialized with parameters: {}", parameters);
this.parameters = parameters;
}
private String injectParameters(String content) {
if(content == null) {
return null;
}
for(String parameter: parameters.stringPropertyNames()) {
String parameterPlaceholder = String.format(PARAMETER_FORMAT, parameter);
String regex = "(?i)" + Pattern.quote(parameterPlaceholder);
String replacement = Matcher.quoteReplacement(parameters.getProperty(parameter));
content = content.replaceAll(regex, replacement);
}
return content;
}
public void injectParameters(Script script) {
log.debug("Injecting parameter for: {}", script.getObjectName());
String injectedScript = injectParameters(script.getContent());
script.setContent(injectedScript);
log.debug("Script for {} after parameter injected: {}", script.getObjectName(), injectedScript);
}
public void injectParametersAll(MigrationScript migration) {
log.debug("Injecting parameter for: {}", migration.getObjectName());
String injectedContent = injectParameters(migration.getContent());
String injectedRollback = injectParameters(migration.getRollback());
String injectedVerify = injectParameters(migration.getVerify());
migration.setContent(injectedContent);
migration.setRollback(injectedRollback);
migration.setVerify(injectedVerify);
log.debug("Migration for {} after parameter injected: {}", migration.getObjectName(), migration);
}
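// Reverse of injectParameters: literal parameter values found in the content
// are replaced by their ${parameter} placeholders, e.g. (illustrative) with
// db=TEST_DB, "select * from TEST_DB.T1" becomes "select * from ${db}.T1".
// Keys are sorted so the substitution order is deterministic.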
public void parametrizeScript(Script script, boolean parametrizeObjectName){
log.debug("Parametrizing script: {}", script.getObjectName());
String parametrizedScript = script.getContent();
List<String> parameterKeys = parameters.stringPropertyNames().stream().sorted().collect(Collectors.toList());
for(String parameter: parameterKeys) {
String parameterPlaceholder = String.format(PARAMETER_FORMAT, parameter);
String regex = PARAMETRIZATION_START_REGEX + "(?i)" + Pattern.quote(parameters.getProperty(parameter)) + PARAMETRIZATION_END_REGEX;
String replacement = "$1" + Matcher.quoteReplacement(parameterPlaceholder) + "$2";
parametrizedScript = parametrizedScript.replaceAll(regex, replacement);
}
script.setContent(parametrizedScript);
if(parametrizeObjectName) {
parameterizeObjectName(script);
}
log.debug("Script for {} after parameterized: {}", script.getObjectName(), parametrizedScript);
}
public void parameterizeObjectName(Script script) {
// String objectName = script.getObjectName();
String schemaName = script.getSchemaName();
String databaseName = script.getDatabaseName();
List<String> parameterKeys = parameters.stringPropertyNames().stream().sorted().collect(Collectors.toList());
for(String parameter: parameterKeys) {
String parameterPlaceholder = String.format(PARAMETER_FORMAT, parameter);
String regex = "(?i)" + Pattern.quote(parameters.getProperty(parameter));
String replacement = Matcher.quoteReplacement(parameterPlaceholder);
// objectName = objectName.replaceAll(regex, replacement);
schemaName = schemaName.replaceAll(regex, replacement);
databaseName = databaseName.replaceAll(regex, replacement);
}
String oldName = script.getFullObjectName();
// script.setObjectName(objectName);
script.setSchemaName(schemaName);
script.setDatabaseName(databaseName);
log.debug("Parametrize object name Changed from {} to {}", oldName, script.getFullObjectName());
}
public Set<String> injectParameters(Set<String> configs) {
return configs.stream().map(config -> {
for(String parameter: parameters.stringPropertyNames()) {
String parameterPlaceholder = String.format(PARAMETER_FORMAT, parameter);
String regex = "(?i)" + Pattern.quote(parameterPlaceholder);
String replacement = Matcher.quoteReplacement(parameters.getProperty(parameter));
config = config.replaceAll(regex, replacement);
}
return config;
}).collect(Collectors.toSet());
}
}

View File

@@ -0,0 +1,311 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.ScriptFactory;
import com.snowflake.dlsync.models.Migration;
import com.snowflake.dlsync.models.MigrationScript;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.ScriptObjectType;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@Slf4j
public class SqlTokenizer {
private static final String TOKEN_START_REGEX = "(?:[=()\\[\\],\\.\\s\\\"\\'])";
private static final String TOKEN_END_REGEX = "(?:[=()\\[\\],\\.\\s\\\"\\';]|$)";
private static final char[] TOKENS = {'.', ',', ';', '"', '\'', '[', ']', '(', ')'};
private static final String MIGRATION_HEADER = "(\\s*---\\s*(?i)version\\s*:\\s*(?<version>\\d+)\\s*)(,\\s*(?i)author\\s*:\\s*(?<author>\\w+)\\s*)?";
private static final String VERSION_REGEX = "(?:^|\n)(--- *(?i)version *: *(?<version>\\d+) *)";
private static final String AUTHOR_REGEX = "(, *(?i)author *: *(?<author>\\w+) *)?\n";
private static final String CONTENT_REGEX = "([\\s\\S]+?(?=(\n---)|($)))";
private static final String ROLL_BACK_REGEX = "((\n--- *(?i)rollback *: +)(?<rollback>[^\n]+))?";
private static final String VERIFY_REGEX = "((\n--- *(?i)verify *: +)(?<verify>[^\n]+))?";
private static final String IDENTIFIER_REGEX = "((?:\\\"[^\"]+\\\"\\.)|(?:[{}$a-zA-Z0-9_]+\\.))?((?:\\\"[^\"]+\\\"\\.)|(?:[{}$a-zA-Z0-9_]+\\.))?(?i)";
private static final String MIGRATION_REGEX = VERSION_REGEX + AUTHOR_REGEX + CONTENT_REGEX + ROLL_BACK_REGEX + VERIFY_REGEX;
private static final String DDL_REGEX = ";\\n+(CREATE\\s+OR\\s+REPLACE\\s+(TRANSIENT\\s|HYBRID\\s|SECURE\\s)?(?<type>FILE FORMAT|\\w+)\\s+(?<name>[\\w.]+)([\\s\\S]+?)(?=(;\\nCREATE\\s+)|(;$)))";
private static final String STRING_LITERAL_REGEX = "(?<!as\\s{1,5})'([^'\\\\]*(?:\\\\.[^'\\\\]*)*(?:''[^'\\\\]*)*)'";
private static final String VIEW_BODY_REGEX = "(CREATE\\s+OR\\s+REPLACE\\s+VIEW\\s+)(?<name>[\\w.${}]+)(\\s*\\([^\\)]+\\))?\\s+AS\\s+(?<body>[\\s\\S]+)$";
private static final String FUNCTION_BODY_REGEX = "(CREATE\\s+OR\\s+REPLACE\\s+FUNCTION\\s+)(?<name>[\\w.${}]+)(?:[\\s\\S]*?AS\\s+('|\\$\\$)\\s*)(?<body>[\\s\\S]+)('|\\$\\$)\\s*;$";
private static final String PROCEDURE_BODY_REGEX = "(CREATE\\s+OR\\s+REPLACE\\s+PROCEDURE\\s+)(?<name>[\\w.${}]+)(?:[\\s\\S]*?AS\\s+('|\\$\\$)\\s*)(?<body>[\\s\\S]+)('|\\$\\$)\\s*;$";
private static final String FILE_FORMAT_BODY_REGEX = "(CREATE\\s+OR\\s+REPLACE\\s+FILE FORMAT\\s+)(?<body>[\\w.${}]+)([\\s\\S]+)$";
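// Migration scripts are SQL files annotated with "---" headers. An example of
// the format these regexes are built to parse (illustrative):
//   ---version: 1, author: jdoe
//   INSERT INTO TABLE1 VALUES (1, 'key', NULL);
//   ---rollback: DELETE FROM TABLE1 WHERE ID = 1;
//   ---verify: SELECT * FROM TABLE1 WHERE ID = 1;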
public static List<Migration> parseMigrationScripts(String content) {
List<Migration> migrations = new ArrayList<>();
Pattern pattern = Pattern.compile(MIGRATION_REGEX);
Matcher matcher = pattern.matcher(content);
while (matcher.find()) {
String versionedContent = matcher.group();
Long version = Long.parseLong(matcher.group("version"));
String author = matcher.group("author");
String rollback = matcher.group("rollback");
String verify = matcher.group("verify");
Migration migration = Migration.builder()
.version(version)
.author(author)
.content(versionedContent)
.rollback(rollback)
.verify(verify)
.build();
migrations.add(migration);
}
return migrations;
}
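// Strips "--", "//" and "/* ... */" comments character by character while
// leaving quoted strings intact, so e.g. (illustrative)
// "SELECT '-- not a comment' FROM T1 -- real comment" keeps the string
// literal but drops the trailing comment.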
public static String removeSqlComments(String sql) {
int index = 0, destinationIndex = 0;
char[] sqlChar = sql.toCharArray();
char[] withoutComments = new char[sqlChar.length];
while(index < sqlChar.length) {
char token = sqlChar[index];
switch (token) {
case '-':
case '/':
if(index < sql.length() - 1 && token == sqlChar[index+1]) {
index += 2;
while(index < sqlChar.length && sqlChar[index] != '\n') {
index++;
}
}
else if (index < sql.length() - 1 && token == '/' && sqlChar[index+1] == '*'){
index += 2;
while(index < sqlChar.length) {
if(index < sqlChar.length - 1 && sqlChar[index] == '*' && sqlChar[index+1] == '/') {
index += 2;
break;
}
index++;
}
}
if(index < sqlChar.length)
withoutComments[destinationIndex++] = sqlChar[index++];
break;
case '"':
case '\'':
withoutComments[destinationIndex++] = sqlChar[index++];
while(index < sqlChar.length) {
if(sqlChar[index] == token && sqlChar[index - 1] != '\\') {
break;
}
withoutComments[destinationIndex++] = sqlChar[index++];
}
if(index < sqlChar.length)
withoutComments[destinationIndex++] = sqlChar[index++];
break;
default:
withoutComments[destinationIndex++] = sqlChar[index++];
}
}
return new String(withoutComments, 0, destinationIndex);
}
public static String removeSqlStringLiteralsManual(String sql) {
int index = 0, destinationIndex = 0;
char[] sqlChar = sql.toCharArray();
char[] withoutLiterals = new char[sqlChar.length];
boolean isOpened = false;
while(index < sqlChar.length) {
char token = sqlChar[index];
if(isOpened) {
if(token == '\\') {
index+=2;
continue;
}
else if(token == '\'') {
if(index+1 < sqlChar.length && sqlChar[index+1] == '\'') {
index+=2;
continue;
}
else {
isOpened = false;
withoutLiterals[destinationIndex++] = sqlChar[index++];
}
}
else {
index++;
}
}
else {
if(token == '\'') {
String prevString = sql.substring(0, index);
Matcher matcher = Pattern.compile("\\s+(?i)as\\s+$").matcher(prevString);
if(!matcher.find()) {
isOpened = true;
}
withoutLiterals[destinationIndex++] = sqlChar[index++];
}
else {
withoutLiterals[destinationIndex++] = sqlChar[index++];
}
}
}
return new String(withoutLiterals, 0, destinationIndex);
}
public static String removeSqlStringLiterals(String sql) {
return Pattern.compile(STRING_LITERAL_REGEX, Pattern.CASE_INSENSITIVE).matcher(sql).replaceAll("''");
}
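// Returns the first occurrence of `name` in the content (after comments and
// string literals are stripped) together with any schema/database qualifiers,
// e.g. (illustrative) getFirstFullIdentifier("OBJ", "... from DB1.S1.OBJ ...")
// yields "DB1.S1.OBJ".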
public static String getFirstFullIdentifier(String name, String content) {
content = removeSqlComments(content);
content = removeSqlStringLiterals(content);
String regex = TOKEN_START_REGEX + IDENTIFIER_REGEX + "(\"?" + Pattern.quote(name) + "\"?)" + TOKEN_END_REGEX;
Matcher matcher = Pattern.compile(regex).matcher(content);
while(matcher.find()) {
String fullIdentifier = matcher.group(3);
String schema = matcher.group(2);
String db = matcher.group(1);
if(schema != null) {
fullIdentifier = schema + fullIdentifier;
}
if(db != null) {
fullIdentifier = db + fullIdentifier;
}
return fullIdentifier;
}
return null;
}
public static Set<String> getFullIdentifiers(String name, String content) {
Set<String> fullIdentifiers = new HashSet<>();
content = removeSqlComments(content);
content = removeSqlStringLiterals(content);
String regex = TOKEN_START_REGEX + IDENTIFIER_REGEX + "(\"?" + Pattern.quote(name) + "\"?)" + TOKEN_END_REGEX;
Matcher matcher = Pattern.compile(regex).matcher(content);
while(matcher.find()) {
String fullIdentifier = name;
String schema = matcher.group(2);
String db = matcher.group(1);
if(schema != null) {
fullIdentifier = schema + fullIdentifier;
}
if(db != null) {
fullIdentifier = db + fullIdentifier;
}
fullIdentifier = fullIdentifier.replace("\"", "");
fullIdentifiers.add(fullIdentifier);
}
return fullIdentifiers;
}
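// Splits a multi-statement DDL dump (e.g. the output of a GET_DDL-style
// export, assumed) into one Script per "CREATE OR REPLACE ..." statement that
// follows a semicolon, mapping the object keyword to a ScriptObjectType and
// producing a migration or state script based on ScriptObjectType.isMigration().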
public static List<Script> parseDdlScripts(String ddl, String database, String schema) {
Matcher matcher = Pattern.compile(DDL_REGEX, Pattern.CASE_INSENSITIVE).matcher(ddl);
List<Script> scripts = new ArrayList<>();
while(matcher.find()) {
String content = matcher.group(1) + ";";
String type = matcher.group("type");
ScriptObjectType objectType = Arrays.stream(ScriptObjectType.values())
.filter(ot -> ot.getSingular().equalsIgnoreCase(type))
.collect(Collectors.toList()).get(0);
String fullObjectName = matcher.group("name");
String scriptObjectName = fullObjectName.split("\\.")[2];
if (objectType.isMigration()) {
MigrationScript script = ScriptFactory.getMigrationScript(database, schema, objectType, scriptObjectName, content);
scripts.add(script);
} else {
Script script = ScriptFactory.getStateScript(database, schema, objectType, scriptObjectName, content);
scripts.add(script);
}
}
return scripts;
}
// public static Set<String> getFullToken(String token, String content) {
// Set<String> fullTokens = new HashSet<>();
// content = removeSqlComments(content);
// String regex = TOKEN_START_REGEX + "(?i)" + Pattern.quote(token) + TOKEN_END_REGEX;
// Matcher matcher = Pattern.compile(regex).matcher(content);
// while(matcher.find()) {
// int index = matcher.start();
// char startChar = content.charAt(index);
// if(startChar == '.') {
// int tokenEndIndex = index;
// String previousToken = SqlTokenizer.getPreviousToken(content, tokenEndIndex);
// fullTokens.add(previousToken + "." + token);
// }
// else if(startChar == '"' && index > 0 && content.charAt(index-1) == '.') {
// int tokenEndIndex = index - 1;
// if(index > 1 && content.charAt(index - 2) == '"') {
// tokenEndIndex = index - 2;
// }
// String previousToken = SqlTokenizer.getPreviousToken(content, tokenEndIndex);
// fullTokens.add(previousToken + "." + token);
// }
// else {
// fullTokens.add(token);
// }
// }
// Set<String> identifiers = getFullIdentifiers(token, content);
// if(!identifiers.equals(fullTokens)) {
// log.error("identifier: {} is different from tokens {}", identifiers, fullTokens);
//// throw new RuntimeException("token mis match");
// getFullIdentifiers(token, content);
// }
// return fullTokens;
// }
// public static String getPreviousToken(String content, int endIndex) {
// int index = endIndex - 1;
// while(index >= 0) {
// char ch = content.charAt(index);
// if(isTokenChar(ch)) {
// return content.substring(index+1, endIndex);
// }
// index --;
// }
// return content.substring(0, endIndex);
// }
// private static boolean isTokenChar(char ch) {
// if(Character.isWhitespace(ch)) {
// return true;
// }
// for(char token: TOKENS) {
// if(ch == token) {
// return true;
// }
// }
// return false;
// }
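// Compares two scripts by their extracted bodies rather than raw text:
// comments are stripped, doubled single quotes are collapsed, and only the
// named "body" group of the type-specific regex is compared, so differences
// in the surrounding header and formatting are ignored.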
public static boolean compareScripts(Script script1, Script script2) {
String content1 = removeSqlComments(script1.getContent());
String content2 = removeSqlComments(script2.getContent());
content1 = content1.replace("''", "'");
content2 = content2.replace("''", "'");
Pattern pattern;
if(script1.getObjectType().equals(ScriptObjectType.VIEWS)) {
pattern = Pattern.compile(VIEW_BODY_REGEX, Pattern.CASE_INSENSITIVE);
}
else if(script1.getObjectType().equals(ScriptObjectType.FUNCTIONS)) {
pattern = Pattern.compile(FUNCTION_BODY_REGEX, Pattern.CASE_INSENSITIVE);
}
else if(script1.getObjectType().equals(ScriptObjectType.PROCEDURES)) {
pattern = Pattern.compile(PROCEDURE_BODY_REGEX, Pattern.CASE_INSENSITIVE);
}
else if(script1.getObjectType().equals(ScriptObjectType.FILE_FORMATS)) {
pattern = Pattern.compile(FILE_FORMAT_BODY_REGEX, Pattern.CASE_INSENSITIVE);
}
else {
pattern = Pattern.compile("(?<body>[\\s\\S]+)$");
}
Matcher viewMatcher1 = pattern.matcher(content1);
Matcher viewMatcher2 = pattern.matcher(content2);
if(viewMatcher1.find() && viewMatcher2.find()) {
String query1 = viewMatcher1.group("body");
String query2 = viewMatcher2.group("body");
return query1.equals(query2);
}
return content1.equals(content2);
}
}

View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{dd-MM-yyyy HH:mm:ss.SSS} %magenta([%thread]) %highlight(%-5level) %logger{36}.%M - %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="STDOUT" />
</root>
</configuration>

View File

@@ -0,0 +1,86 @@
package com.snowflake.dlsync;
import com.snowflake.dlsync.models.ChangeType;
import org.apache.commons.cli.*;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class MainTest {
@Test
public void testBuildCommandOnlyHashes() throws ParseException {
String[] args = {"deploy", "--only-hashes"};
CommandLine commandLine = Main.buildCommandOptions(args);
assertTrue(commandLine.hasOption("--only-hashes"));
String[] args2 = {"deploy", "-o"};
commandLine = Main.buildCommandOptions(args2);
assertTrue(commandLine.hasOption("--only-hashes"));
String[] args3 = {"deploy"};
commandLine = Main.buildCommandOptions(args3);
assertFalse(commandLine.hasOption("--only-hashes"));
}
@Test
public void testBuildCommandScriptRoot() throws ParseException {
String[] args = {"deploy", "--only-hashes", "--script-root", "test"};
CommandLine commandLine = Main.buildCommandOptions(args);
assertTrue(commandLine.hasOption("--only-hashes"));
assertEquals("test", commandLine.getOptionValue("script-root"));
String[] invalidArgs = new String[]{"deploy", "--script-root"};
assertThrows(MissingArgumentException.class, () -> Main.buildCommandOptions(invalidArgs));
}
@Test
public void testBuildCommandProfile() throws ParseException {
String[] args = {"deploy", "--script-root", "test/scripts", "--profile", "prod"};
CommandLine commandLine = Main.buildCommandOptions(args);
assertFalse(commandLine.hasOption("--only-hashes"));
assertEquals("test/scripts", commandLine.getOptionValue("script-root"));
assertEquals("prod", commandLine.getOptionValue("profile"));
String[] invalidArgs = new String[]{"deploy", "--profile"};
assertThrows(MissingArgumentException.class, () -> Main.buildCommandOptions(invalidArgs));
}
@Test
public void testGetChangeType() {
String[] args = {"deploy"};
ChangeType changeType = Main.getChangeType(args);
assertTrue(changeType == ChangeType.DEPLOY);
String[] args2 = {"rollback"};
changeType = Main.getChangeType(args2);
assertTrue(changeType == ChangeType.ROLLBACK);
String[] args3 = {"verify"};
changeType = Main.getChangeType(args3);
assertTrue(changeType == ChangeType.VERIFY);
String[] args4 = {"create_script"};
changeType = Main.getChangeType(args4);
assertTrue(changeType == ChangeType.CREATE_SCRIPT);
String[] args5 = {"create_lineage"};
changeType = Main.getChangeType(args5);
assertTrue(changeType == ChangeType.CREATE_LINEAGE);
String[] args6 = {"invalid"};
assertThrows(IllegalArgumentException.class, () -> Main.getChangeType(args6));
String[] args7 = {};
changeType = Main.getChangeType(args7);
assertTrue(changeType == ChangeType.VERIFY);
String[] args8 = {"DEploy", "--only-hashes"};
changeType = Main.getChangeType(args8);
assertTrue(changeType == ChangeType.DEPLOY);
}
}

View File

@@ -0,0 +1,369 @@
package com.snowflake.dlsync.dependency;
import com.snowflake.dlsync.ScriptFactory;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.ScriptObjectType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.*;
class DependencyExtractorTest {
private DependencyExtractor dependencyExtractor;
@BeforeEach
void setUp() {
dependencyExtractor = new DependencyExtractor();
}
@AfterEach
void tearDown() {
}
List<Script> mockScripts(String name, String schema, String... contents) {
List<Script> scripts = new ArrayList<>();
for(int i = 0; i < contents.length; i++) {
Script script = ScriptFactory.getStateScript("TEST_DB", schema, ScriptObjectType.VIEWS, name + i, contents[i]);
scripts.add(script);
}
return scripts;
}
List<Script> mockScripts() {
String[] contents = {"CREATE OR REPLACE MOCK1 AS SELECT * FROM TABLE1;", "CREATE OR REPLACE TEST_SCHEMA.MOCK2 AS SELECT * FROM MOCK1;", "CREATE OR REPLACE TEST_DB.TEST_SCHEMA.MOCK3 AS SELECT * FROM MOCK1 UNION SELECT * FROM MOCK2;"};
List<Script> mock = mockScripts("MOCK", "TEST_SCHEMA", contents);
mock.addAll(mockNotDependency());
return mock;
}
List<Script> mockNotDependency() {
String[] contents = {"CREATE OR REPLACE NOT_DEPENDENCY1 AS SELECT * FROM TABLE1;", "CREATE OR REPLACE TEST_SCHEMA.NOT_DEPENDENCY2 AS SELECT * FROM MOCK1;", "CREATE OR REPLACE TEST_DB.TEST_SCHEMA.NOT_DEPENDENCY3 AS SELECT * FROM MOCK1 UNION SELECT * FROM MOCK2;"};
return mockScripts("NOT_DEPENDENCY", "TEST_SCHEMA", contents);
}
Script mockViewDependency(String name, String schema) {
String content = "CREATE OR REPLACE VIEW " + name + " AS SELECT * FROM TABLE1;";
return ScriptFactory.getStateScript("TEST_DB", schema, ScriptObjectType.VIEWS, name, content);
}
Script mockTableDependency(String name, String schema) {
String content = "CREATE OR REPLACE TABLE " + name + "(ID VARCHAR, COL1 NUMBER)";
return ScriptFactory.getMigrationScript("TEST_DB", schema, ScriptObjectType.TABLES, name, content);
}
Script mockUdfDependency(String name, String schema) {
String content = "CREATE OR REPLACE FUNCTION " + name + "(ARG1 VARCHAR)\n" +
"RETURNS VARCHAR\n" +
"LANGUAGE JAVASCRIPT\n" +
"AS\n" +
"$$\n" +
" return ARG1.toUpperCase();\n" +
"$$;";
return ScriptFactory.getStateScript("TEST_DB", schema, ScriptObjectType.FUNCTIONS, name, content);
}
@Test
void extractScriptDependenciesTestFrom() {
String content = "CREATE OR REPLACE VIEW VIEW1 AS SELECT * FROM DEPENDENCY join test_schema2.not_dependency1;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency = mockViewDependency("DEPENDENCY", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesTestJoin() {
String content = "CREATE OR REPLACE VIEW VIEW1 AS SELECT * FROM DEPENDENCY JOIN test_schema.JOIN_DEPENDENCY;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("DEPENDENCY", "TEST_SCHEMA");
Script dependency2 = mockViewDependency("JOIN_DEPENDENCY", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(dependency2);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency2,
dependency1
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesTestWithQuotedObjects() {
String content = "CREATE OR REPLACE VIEW VIEW1 AS SELECT * FROM \"TEST_SCHEMA2\".\"DEPENDENCY\" join \"TEST_SCHEMA2\".\"NOT_DEPENDENCY1\"";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("DEPENDENCY", "TEST_SCHEMA2");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesTestWithComments() throws IOException {
String content = "CREATE OR REPLACE VIEW VIEW1 COMMENT='SOME COMMENTS' AS SELECT * FROM -- NOT_DEPENDENCY1\n" +
"DEPENDENCY1 T1\n" +
"JOIN \n" +
"// NOT_DEPENDENCY2 TC2 ON T1.ID = TC2.ID\n" +
"DEPENDENCY2 TC3 ON T1.ID = TC3.ID\n" +
"JOIN /*\n" +
"ADDITIONAL COMMENTS HERE\n" +
"SELECT * FROM NOT_DEPENDENCY2;\n" +
"*/\n" +
"DEPENDENCY3 T4 ON T4.ID = T1.ID;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("DEPENDENCY1", "TEST_SCHEMA");
Script dependency2 = mockViewDependency("DEPENDENCY2", "TEST_SCHEMA");
Script dependency3 = mockViewDependency("DEPENDENCY3", "TEST_SCHEMA");
Script notDependency1 = mockViewDependency("NOT_DEPENDENCY1", "TEST_SCHEMA");
Script notDependency2 = mockViewDependency("NOT_DEPENDENCY2", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(dependency2);
changedScript.add(dependency3);
changedScript.add(notDependency1);
changedScript.add(notDependency2);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency2,
dependency1,
dependency3
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesTestWithCTE() {
String content = "CREATE OR REPLACE VIEW VIEW1 AS wITH CTE1 AS (\n" +
" SELECT * FROM DEPENDENCY1\n" +
" ),\n" +
" CTE2 AS(\n" +
" SELECT B.*, JLS.VALUE::STRING AS JOURNAL_LINE\n" +
" FROM DEPENDENCY2 B\n" +
" JOIN JOIN_DEPENDENCY2\n" +
" ),CTE3 AS (\n" +
" SELECT * FROM CTE1 WHERE ID IN (SELECT ID FROM CTE2) \n" +
" ), CTE4 AS (\n" +
" SELECT * FROM DEPENDENCY3 D1 JOIN CTE3 C3 ON D1.ID=C3.ID\n" +
" ), CTE5 AS (SELECT * FROM CTE4 JOIN JOIN_DEPENDENCY3)\n" +
" SELECT * FROM CTE1 C1 JOIN CTE5 CT5 ON CT1.ID=CT5.ID JOIN JOIN_DEPENDENCY4 ON CT1.ID=CT5.ID;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("DEPENDENCY1", "TEST_SCHEMA");
Script dependency2 = mockViewDependency("DEPENDENCY2", "TEST_SCHEMA");
Script dependency3 = mockViewDependency("DEPENDENCY3", "TEST_SCHEMA");
Script joinDependency2 = mockViewDependency("JOIN_DEPENDENCY2", "TEST_SCHEMA");
Script joinDependency3 = mockViewDependency("JOIN_DEPENDENCY3", "TEST_SCHEMA");
Script joinDependency4 = mockViewDependency("JOIN_DEPENDENCY4", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(dependency2);
changedScript.add(dependency3);
changedScript.add(joinDependency2);
changedScript.add(joinDependency3);
changedScript.add(joinDependency4);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1,
dependency2,
dependency3,
joinDependency2,
joinDependency3,
joinDependency4
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesTestWithQuoted() {
String content = "CREATE OR REPLACE VIEW VIEW1 AS SELECT 'SELECT * FROM NOT_DEPENDENCY1', * FROM DEPENDENCY1";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("DEPENDENCY1", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
//TODO:Fix this
// assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesTestWithAliases() {
String content = "CREATE OR REPLACE VIEW VIEW1 AS SELECT * FROM DEPENDENCY1 as dp1, DEPENDENCY2 dp2, DEPENDENCY3 AS dp3 where dp1.id=dp2.id and dp2.id=dp3.id;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("DEPENDENCY1", "TEST_SCHEMA");
Script dependency2 = mockViewDependency("DEPENDENCY2", "TEST_SCHEMA");
Script dependency3 = mockViewDependency("DEPENDENCY3", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(dependency2);
changedScript.add(dependency3);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency2,
dependency1,
dependency3
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesWithSameObjectName() {
String content = "CREATE OR REPLACE VIEW TEST_SCHEMA.VIEW1 AS SELECT * FROM TEST_SCHEMA2.VIEW1 join test_schema2.not_dependency1;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency = mockViewDependency("VIEW1", "TEST_SCHEMA2");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesWithUdf() {
String content = "CREATE OR REPLACE VIEW TEST_SCHEMA.VIEW1 AS SELECT COL1, COL2, TEST_SCHEMA1.UDF1(COL3) AS COL4 FROM TEST_SCHEMA2.VIEW1 join test_schema2.not_dependency1;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("VIEW1", "TEST_SCHEMA2");
Script dependency2 = mockUdfDependency("UDF1", "TEST_SCHEMA1");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(dependency2);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1,
dependency2
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesTableWithViewDependency() {
String content =
"---version: 0, author: dlsync\n" +
"CREATE OR REPLACE TABLE TEST_SCHEMA.TABLE1 AS SELECT * FROM TEST_SCHEMA.VIEW1;\n";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.TABLES, "TABLE1", content);
Script dependency1 = mockViewDependency("VIEW1", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesWithStringNames() {
String content =
"---version: 1, author: dlsync\n" +
"INSERT INTO TEST_SCHEMA.TABLE1 values(1, 'not_dependency1');\n";
Script script = ScriptFactory.getMigrationScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.TABLES, "TABLE1", content, 1L, "dlsync", "", "");
Script dependency1 = mockTableDependency("TABLE1", "TEST_SCHEMA");
Script not_dependency1 = mockViewDependency("NOT_DEPENDENCY1", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(not_dependency1);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
@Test
void extractScriptDependenciesIdentifierAfterEquals() {
String content = "CREATE OR REPLACE VIEW TEST_SCHEMA.VIEW1 AS SELECT COL1, COL2=FUNC1(COL3) FROM TEST_SCHEMA2.VIEW1 join test_schema2.not_dependency1;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("VIEW1", "TEST_SCHEMA2");
Script dependency2 = mockUdfDependency("FUNC1", "TEST_SCHEMA");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(dependency2);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1,
dependency2
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
// TODO: Fix this failing test
void extractScriptDependenciesWithSameName() {
String content = "CREATE OR REPLACE VIEW TEST_SCHEMA.VIEW1 AS SELECT COL1, COL2 FROM TEST_SCHEMA2.VIEW1 join test_schema2.not_dependency1;";
Script script = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content);
Script dependency1 = mockViewDependency("VIEW1", "TEST_SCHEMA2");
Script notDependency = mockUdfDependency("VIEW1", "TEST_SCHEMA2");
List<Script> changedScript = mockScripts();
changedScript.add(script);
changedScript.add(dependency1);
changedScript.add(notDependency);
dependencyExtractor.addScripts(changedScript);
Set<Script> expected = Set.of(
dependency1
);
Set<Script> actual = dependencyExtractor.extractScriptDependencies(script);
assertEquals(expected, actual, "Dependency extractor failed:");
}
}

View File

@@ -0,0 +1,82 @@
package com.snowflake.dlsync.dependency;
import com.snowflake.dlsync.ConfigManager;
import com.snowflake.dlsync.ScriptFactory;
import com.snowflake.dlsync.models.Config;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.ScriptObjectType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
class DependencyGraphTest {
private DependencyGraph dependencyGraph;
@BeforeEach
void setUp() throws IOException {
DependencyExtractor dependencyExtractor = new DependencyExtractor();
dependencyGraph = new DependencyGraph(dependencyExtractor, new Config());
}
@AfterEach
void tearDown() {
}
@Test
void topologicalSortTest() {
String content1 = "CREATE OR REPLACE VIEW VIEW1 AS SELECT * FROM TABLE1;";
String content2 = "create OR REPLACE VIEW VIEW2 AS SELECT * FROM VIEW1;";
String content3 = "CREATE OR replace VIEW VIEW3 AS SELECT * FROM VIEW2;";
String content4 = "create or replace view VIEW4 AS SELECT * FROM VIEW3;";
Script script1 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content1);
Script script2 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW2", content2);
Script script3 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW3", content3);
Script script4 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW4", content4);
dependencyGraph.addNodes(List.of(script1, script2, script3, script4));
List<Script> expected = List.of(script1, script2, script3, script4);
List<Script> actual = dependencyGraph.topologicalSort();
assertEquals(expected, actual);
}
@Test
void topologicalSortMultipleDependencyTest() {
String content1 = "CREATE OR REPLACE VIEW VIEW1 AS SELECT * FROM TABLE1;";
String content2 = "CREATE OR REPLACE VIEW VIEW2 AS SELECT * FROM VIEW1 JOIN TABLE2 TB2 ON V1.ID=TB2.ID;";
String content3 = "CREATE OR REPLACE VIEW VIEW3 AS SELECT * FROM VIEW2 v2 JOIN VIEW1 V1 ON V1.ID=V2.ID;";
String content4 = "CREATE OR REPLACE VIEW VIEW4 AS SELECT * FROM TABLE3 WHERE ID = (SELECT ID FROM VIEW3);";
String content5 = "CREATE OR REPLACE VIEW VIEW5 AS SELECT * FROM VIEW4 V3 JOIN TABLE4 TB4 ON V3.ID=TB3.ID;";
String content6 = "CREATE OR REPLACE VIEW VIEW6 AS SELECT * FROM VIEW5 V5 JOIN VIEW2 V2 ON V2.ID=V5.ID;";
String content7 = "CREATE OR REPLACE VIEW VIEW7 AS SELECT * FROM VIEW6 V5 JOIN VIEW1 V1 ON V5.ID=V1.ID;";
String content8 = "CREATE OR REPLACE VIEW VIEW7 AS SELECT * FROM VIEW1 WHERE ID NOT IN (SELECT ID FROM VIEW7);";
Script script1 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW1", content1);
Script script2 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW2", content2);
Script script3 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW3", content3);
Script script4 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW4", content4);
Script script5 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW5", content5);
Script script6 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW6", content6);
Script script7 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW7", content7);
Script script8 = ScriptFactory.getStateScript("TEST_DB", "TEST_SCHEMA", ScriptObjectType.VIEWS, "VIEW8", content8);
List<Script> scripts = new ArrayList<>(List.of(script1, script2, script3, script4, script5, script6, script7, script8));
Collections.shuffle(scripts);
dependencyGraph.addNodes(scripts);
List<Script> expected = List.of(script1, script2, script3, script4, script5, script6, script7, script8);
List<Script> actual = dependencyGraph.topologicalSort();
assertEquals(expected, actual);
}
}

View File

@@ -0,0 +1,72 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.ScriptFactory;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.ScriptObjectType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Properties;
import static org.junit.jupiter.api.Assertions.*;
class ParameterInjectorTest {
private ParameterInjector parameterInjector;
@BeforeEach
void setUp() {
Properties parameters = new Properties();
parameters.put("db", "TEST_DB");
parameters.put("schema1", "test_schema_1");
parameters.put("schema2", "test_schema_2");
parameters.put("profile", "dev");
parameters.put("tenant_id", "test_tenant_id");
parameterInjector = new ParameterInjector(parameters);
}
@AfterEach
void tearDown() {
}
@Test
void injectParametersDontCapture() {
String content = "create or replace view as select * from ${db}.${schema1}.table1 where db='my_db'";
Script script = ScriptFactory.getStateScript("", "", ScriptObjectType.VIEWS, "VIEW1", content);
parameterInjector.injectParameters(script);
String actual = script.getContent();
String expected = "create or replace view as select * from TEST_DB.test_schema_1.table1 where db='my_db'";
assertEquals(expected, actual, "parameter injection test failed");
}
@Test
void injectParametersTest() {
String content = "create or replace view as select * from ${db}.${schema1}.table1 tb1 join test_${profile} tp on tp.id=tb1.id where tenant = '${tenant_id}'";
Script script = ScriptFactory.getStateScript("", "", ScriptObjectType.VIEWS, "VIEW1", content);
parameterInjector.injectParameters(script);
String actual = script.getContent();
String expected = "create or replace view as select * from TEST_DB.test_schema_1.table1 tb1 join test_dev tp on tp.id=tb1.id where tenant = 'test_tenant_id'";
assertEquals(expected, actual, "parameter injection test failed");
}
@Test
void parametrizeScriptTest() {
String content = "create or replace view as select * from TEST_DB.test_schema_1.table1 where tenant = 'test_tenant_id'";
Script script = ScriptFactory.getStateScript("", "", ScriptObjectType.VIEWS, "VIEW1", content);
parameterInjector.parametrizeScript(script, false);
String actual = script.getContent();
String expected = "create or replace view as select * from ${db}.${schema1}.table1 where tenant = '${tenant_id}'";
assertEquals(expected, actual, "parameterize script test failed");
}
@Test
void parameterizeObjectNameTest() {
String content = "create or replace view as select * from TEST_DB.test_schema_1.table1 where tenant = 'test_tenant_id'";
Script script = ScriptFactory.getStateScript("TEST_DB", "test_schema_1", ScriptObjectType.VIEWS, "VIEW1", content);
parameterInjector.parameterizeObjectName(script);
String actual = script.getFullObjectName();
String expected = "${DB}.${SCHEMA1}.VIEW1";
assertEquals(expected, actual, "parameterize object name test failed");
}
}

View File

@@ -0,0 +1,269 @@
package com.snowflake.dlsync.parser;
import com.snowflake.dlsync.ScriptFactory;
import com.snowflake.dlsync.models.Migration;
import com.snowflake.dlsync.models.Script;
import com.snowflake.dlsync.models.ScriptObjectType;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.*;
class SqlTokenizerTest {
@Test
public void removeSqlCommentsAllComments() {
String sql = "CREATE OR REPLACE VIEW VIEW1 COMMENT='SOME COMMENTS' AS SELECT * FROM --TABLE_COMMENT1\n" +
"TABLE1 T1\n" +
"JOIN \n" +
"//TABLE_COMMENT_2 TC2 ON T1.ID = TC2.ID\n" +
"TABLE3 TC3 ON T1.ID = '-- invalid\\' comments'\n" +
"JOIN /*\n" +
"ADDITIONAL COMMENTS HERE\n" +
"SELECT * FROM TABLE_COMMENT_4;\n" +
"*/\n" +
"TABLE4 T4 ON T4.ID = T1.ID;/*new test at end */";
String expected = "CREATE OR REPLACE VIEW VIEW1 COMMENT='SOME COMMENTS' AS SELECT * FROM \n" +
"TABLE1 T1\n" +
"JOIN \n" +
"\n" +
"TABLE3 TC3 ON T1.ID = '-- invalid\\' comments'\n" +
"JOIN " +
"\n" +
"TABLE4 T4 ON T4.ID = T1.ID;";
String actual = SqlTokenizer.removeSqlComments(sql);
assertEquals(expected, actual, "Failed to remove comments.");
}
@Test
void parseMigrationScripts() {
String content = "---version:0, author:junit\n"+
"create or replace table (id integer, key varchar, value varchar);\n"+
"---version:1, author:junit\n"+
"insert into table1 values(1, 'key', null);\n"+
"---rollback: delete from table1 where id = 1;\n"+
"---verify: select * from table1 where id = 1;\n";
Migration migration1 = Migration.builder()
.version(0L)
.author("junit")
.content("---version:0, author:junit\n"+
"create or replace table (id integer, key varchar, value varchar);")
.build();
Migration migration2 = Migration.builder()
.version(1L)
.author("junit")
.content("\n---version:1, author:junit\n"+
"insert into table1 values(1, 'key', null);\n"+
"---rollback: delete from table1 where id = 1;\n"+
"---verify: select * from table1 where id = 1;")
.rollback("delete from table1 where id = 1;")
.verify("select * from table1 where id = 1;")
.build();
List<Migration> expected = List.of(migration1, migration2);
List<Migration> actual = SqlTokenizer.parseMigrationScripts(content);
assertIterableEquals(expected, actual, "Test failed for parsing basic migration script.");
}
@Test
void parseMigrationScriptsEmptyVerifyAndRollbackTest() {
String content = "---version:0, author: junit\n"+
"create or replace table (id integer, key varchar, value varchar);\n"+
"---verify: \n" +
"---version:1\n"+
"insert into table1 values(1, 'key', null);\n"+
"---rollback: \n"+
"---verify: select * from table1 where id = 1;\n";
Migration migration1 = Migration.builder()
.version(0L)
.author("junit")
.content("---version:0, author: junit\n"+
"create or replace table (id integer, key varchar, value varchar);\n"+
"---verify: ")
.verify(" ")
.build();
Migration migration2 = Migration.builder()
.version(1L)
.author(null)
.content("\n---version:1\n"+
"insert into table1 values(1, 'key', null);\n"+
"---rollback: \n"+
"---verify: select * from table1 where id = 1;")
.rollback(" ")
.verify("select * from table1 where id = 1;")
.build();
List<Migration> expected = List.of(migration1, migration2);
List<Migration> actual = SqlTokenizer.parseMigrationScripts(content);
assertIterableEquals(expected, actual, "Test failed for parsing basic migration script.");
}
@Test
void parseMigrationScriptsWithSemicolonContent() {
String content = "---version:0, author: junit\n"+
"create or replace table (id integer, key varchar, value varchar);\n"+
"---verify: \n" +
"---version:1\n"+
"insert into table1 values(1, 'key', 'some date with ; inside');\n"+
"---rollback: \n"+
"---verify: select * from table1 where id = 1;\n";
Migration migration1 = Migration.builder()
.version(0L)
.author("junit")
.content("---version:0, author: junit\n"+
"create or replace table (id integer, key varchar, value varchar);\n"+
"---verify: ")
.verify(" ")
.build();
Migration migration2 = Migration.builder()
.version(1L)
.author(null)
.content("\n---version:1\n"+
"insert into table1 values(1, 'key', 'some date with ; inside');\n"+
"---rollback: \n"+
"---verify: select * from table1 where id = 1;")
.rollback(" ")
.verify("select * from table1 where id = 1;")
.build();
List<Migration> expected = List.of(migration1, migration2);
List<Migration> actual = SqlTokenizer.parseMigrationScripts(content);
assertIterableEquals(expected, actual, "Test failed for parsing basic migration script.");
}
@Test
void parseMigrationScriptsWithMultilineContent() {
String content = "---version:0, author: junit\n"+
"create or replace table (id integer, key varchar, value varchar);\n"+
"---verify: \n" +
"---version:1\n"+
"insert into table1(id, key, value)\n" +
"values(1, 'key', 'some date with ; inside');\n"+
"---rollback: \n"+
"---verify: select * from table1 where id = 1;\n";
Migration migration1 = Migration.builder()
.version(0L)
.author("junit")
.content("---version:0, author: junit\n"+
"create or replace table (id integer, key varchar, value varchar);\n"+
"---verify: ")
.verify(" ")
.build();
Migration migration2 = Migration.builder()
.version(1L)
.author(null)
.content("\n---version:1\n"+
"insert into table1(id, key, value)\n" +
"values(1, 'key', 'some date with ; inside');\n"+
"---rollback: \n"+
"---verify: select * from table1 where id = 1;")
.rollback(" ")
.verify("select * from table1 where id = 1;")
.build();
List<Migration> expected = List.of(migration1, migration2);
List<Migration> actual = SqlTokenizer.parseMigrationScripts(content);
assertIterableEquals(expected, actual, "Test failed for parsing migration script with a multiline statement.");
}
@Test
void getPreviousToken() {
// TODO: add coverage for SqlTokenizer.getPreviousToken once its expected behavior is specified.
}
@Test
void removeSqlComments() {
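// A minimal sketch, not verified against the implementation: this assumes
// removeSqlComments(String) mirrors removeSqlStringLiterals(String) and strips
// "--" line comments while leaving the statement text intact. The expected
// value below is an assumption about whitespace handling, not the pinned contract.
String sql = "select col1 -- trailing comment\nfrom table1;";
String expected = "select col1 \nfrom table1;";
assertEquals(expected, SqlTokenizer.removeSqlComments(sql), "Remove sql comments assertion failed!");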
}
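// getFullIdentifiers should return every qualified occurrence of a bare object name,
// including database-qualified and double-quoted identifiers (with quotes stripped).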
@Test
void getFullIdentifiersTest() {
String content = "select * from schema1.object_name1 join schema2.object_name1 join (select * from db1.schema2.object_name2) union select * from \"schema3\".\"object_name3\" union select * from object_name4,schema5.object_name5,schema2.object_name6;";
Set<String> expected1 = Set.of("schema1.object_name1", "schema2.object_name1");
assertEquals(expected1, SqlTokenizer.getFullIdentifiers("object_name1", content), "Test failed to extract full token.");
Set<String> expected2 = Set.of("db1.schema2.object_name2");
assertEquals(expected2, SqlTokenizer.getFullIdentifiers("object_name2", content), "Test failed to extract full token.");
Set<String> expected3 = Set.of("schema3.object_name3");
assertEquals(expected3, SqlTokenizer.getFullIdentifiers("object_name3", content), "Test failed to extract full token.");
Set<String> expected5 = Set.of("schema5.object_name5");
assertEquals(expected5, SqlTokenizer.getFullIdentifiers("object_name5", content), "Test failed to extract full token.");
}
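// parseDdlScripts should classify views and functions as state scripts and every table
// variant (permanent, transient, hybrid) as a migration script; the bare schema statement
// produces no script at all.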
@Test
void parseDdlScriptsTest() {
String ddl = "create or replace schema schema1;\n\n" +
"create or replace view db1.schema1.view1 as select * from table1;\n" +
"create or replace table db1.schema1.table1 (col1 varchar, col2 number);\n" +
"create or replace transient table db1.schema1.table2 (col1 varchar, col2 number);\n" +
"create or replace hybrid table db1.schema1.table3 (col1 varchar, col2 number);\n" +
"create or replace function db1.schema1.function1(arg1 varchar)\n" +
"RETURNS VARCHAR(16777216)\n" +
"LANGUAGE JAVASCRIPT\n" +
"AS '" +
"return arg1.trim();\n"+
"';";
List<Script> actual = SqlTokenizer.parseDdlScripts(ddl, "db1", "schema1");
List<Script> expected = List.of(
ScriptFactory.getStateScript("db1", "schema1", ScriptObjectType.VIEWS, "view1","create or replace view db1.schema1.view1 as select * from table1;"),
ScriptFactory.getMigrationScript("db1", "schema1", ScriptObjectType.TABLES, "table1","create or replace table db1.schema1.table1 (col1 varchar, col2 number);"),
ScriptFactory.getMigrationScript("db1", "schema1", ScriptObjectType.TABLES, "table2","create or replace transient table db1.schema1.table2 (col1 varchar, col2 number);"),
ScriptFactory.getMigrationScript("db1", "schema1", ScriptObjectType.TABLES, "table3","create or replace hybrid table db1.schema1.table3 (col1 varchar, col2 number);"),
ScriptFactory.getStateScript("db1", "schema1", ScriptObjectType.FUNCTIONS, "function1","create or replace function db1.schema1.function1(arg1 varchar)\n" +
"RETURNS VARCHAR(16777216)\n" +
"LANGUAGE JAVASCRIPT\n" +
"AS '" +
"return arg1.trim();\n"+
"'")
);
assertEquals(expected, actual, "Parse ddl failed");
}
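// Simple literals are emptied in place while the surrounding SQL structure is preserved.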
@Test
void removeSqlStringLiteralsWithSimpleLiterals() {
String sql = "insert into table1 values('tabl2', 'table3', 'table4')";
String expected = "insert into table1 values('', '', '')";
String actual = SqlTokenizer.removeSqlStringLiterals(sql);
assertEquals(expected, actual, "Remove string literal assertion failed!");
}
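// Both backslash-escaped and doubled single quotes are treated as escapes inside a literal,
// so each whole literal collapses to ''.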
@Test
void removeSqlStringLiteralsWithEscapedLiterals() {
String sql = "select 'he said \\'good\\'', 'and she said ''GREAT'''";
String expected = "select '', ''";
String actual = SqlTokenizer.removeSqlStringLiterals(sql);
assertEquals(expected, actual, "Remove string literal assertion failed!");
}
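// A single-quoted SQL UDF body should be left untouched rather than blanked as an ordinary string literal.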
@Test
void removeSqlStringLiteralsWithUDFContent() {
String sql = "CREATE OR REPLACE FUNCTION UDF1(\"ARG1\" VARCHAR(16777216), \"ARG2\" VARCHAR(16777216))\n" +
"RETURNS BOOLEAN\n" +
"LANGUAGE SQL\n" +
"as '\n" +
"select ARG1, ARG2 FROM TABLE1';";
String expected = "CREATE OR REPLACE FUNCTION UDF1(\"ARG1\" VARCHAR(16777216), \"ARG2\" VARCHAR(16777216))\n" +
"RETURNS BOOLEAN\n" +
"LANGUAGE SQL\n" +
"as '\n" +
"select ARG1, ARG2 FROM TABLE1';";
String actual = SqlTokenizer.removeSqlStringLiterals(sql);
assertEquals(expected, actual, "Remove string literal assertion failed!");
}
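// In a CTAS, only the literal inside the SELECT is blanked; the DDL header is preserved.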
@Test
void removeSqlStringLiteralsWithCTAS() {
String sql = "CREATE OR REPLACE TABLE TABLE1(COL1 VARCHAR(16777216), COL2 VARCHAR(16777216))\n" +
"as \n" +
"select 1, 'VALUE';";
String expected = "CREATE OR REPLACE TABLE TABLE1(COL1 VARCHAR(16777216), COL2 VARCHAR(16777216))\n" +
"as \n" +
"select 1, '';";
String actual = SqlTokenizer.removeSqlStringLiterals(sql);
assertEquals(expected, actual, "Remove string literal assertion failed!");
}
}