mirror of
https://github.com/sqlfluff/sqlfluff
synced 2025-12-17 19:31:32 +00:00
Compare commits
767 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7783c3014e | ||
|
|
8e26bd4ff7 | ||
|
|
787b58e58d | ||
|
|
65ef3d0ec3 | ||
|
|
ed9d0c98ad | ||
|
|
a4658769b3 | ||
|
|
190c4946f5 | ||
|
|
4dc390f077 | ||
|
|
80f4fc2d3b | ||
|
|
50a1c4b6ff | ||
|
|
391449eb47 | ||
|
|
79b168374b | ||
|
|
171c71e6e3 | ||
|
|
0e333c3246 | ||
|
|
02bca484a6 | ||
|
|
5cb916ac74 | ||
|
|
e864e174ae | ||
|
|
c6b39aae57 | ||
|
|
b9bcb9bc71 | ||
|
|
fb1be11571 | ||
|
|
cbcbe52676 | ||
|
|
f0602da8df | ||
|
|
b5ef93c06e | ||
|
|
2106716132 | ||
|
|
ffc7af43c0 | ||
|
|
51e8cf43c2 | ||
|
|
6b8faf86fc | ||
|
|
8422eb7826 | ||
|
|
e63c337b17 | ||
|
|
b8d9c2b0a5 | ||
|
|
c039d2f28d | ||
|
|
48fcb75967 | ||
|
|
1db76206c7 | ||
|
|
a2f773d9f4 | ||
|
|
e12c5d0100 | ||
|
|
36ad37b2a3 | ||
|
|
baa32b8836 | ||
|
|
c83c73535e | ||
|
|
8c63de4277 | ||
|
|
052019e206 | ||
|
|
62ee9d6abb | ||
|
|
bc2da1a54c | ||
|
|
34f51497bc | ||
|
|
9f6a6c3b7c | ||
|
|
069be3a01d | ||
|
|
64baf05302 | ||
|
|
693381882a | ||
|
|
6fb5a84e6d | ||
|
|
a5e0549195 | ||
|
|
17ce19e743 | ||
|
|
cc2be1475e | ||
|
|
54e4304e77 | ||
|
|
cd75984ad3 | ||
|
|
f483159b0d | ||
|
|
2ffb5ec851 | ||
|
|
514690958e | ||
|
|
160361d79a | ||
|
|
e21abac164 | ||
|
|
e92210c4f6 | ||
|
|
50420fb6ac | ||
|
|
eb77eb1a5d | ||
|
|
4e7367596e | ||
|
|
d3c084b9b9 | ||
|
|
86645c6cbd | ||
|
|
faa6c070dc | ||
|
|
abe9a0caba | ||
|
|
48fc88cd73 | ||
|
|
0bc27a0e6f | ||
|
|
70d4060087 | ||
|
|
3f5d53e102 | ||
|
|
37bffc387c | ||
|
|
1007f00c74 | ||
|
|
1031a3342f | ||
|
|
015b1301c1 | ||
|
|
109fa6ffd0 | ||
|
|
2b76370a40 | ||
|
|
b8a5d533ea | ||
|
|
6f808ffdcb | ||
|
|
e3aa1995a0 | ||
|
|
914d930244 | ||
|
|
8c83583a6b | ||
|
|
5c90dfa7ef | ||
|
|
74a19bb555 | ||
|
|
0a32baa44f | ||
|
|
8ca4e0a1dc | ||
|
|
3c9648a661 | ||
|
|
9cf325302c | ||
|
|
ca0c0fbe28 | ||
|
|
7030b5b6aa | ||
|
|
b0f9412127 | ||
|
|
2088f4cd0c | ||
|
|
06e1fff328 | ||
|
|
f5d98615e3 | ||
|
|
642ee248c7 | ||
|
|
00f865f037 | ||
|
|
73823d4237 | ||
|
|
64f80278be | ||
|
|
563d2dc817 | ||
|
|
9ceff0a3ee | ||
|
|
76ceb29a30 | ||
|
|
e267cbebfe | ||
|
|
057972bde8 | ||
|
|
2ab8424302 | ||
|
|
e48729fc4b | ||
|
|
e7b7af65ec | ||
|
|
b6c101a66c | ||
|
|
8f8b5d3f9d | ||
|
|
f78289e361 | ||
|
|
c86e1f76ab | ||
|
|
8251ffc9d5 | ||
|
|
0c53a13694 | ||
|
|
dd6c667eae | ||
|
|
5658f2f36c | ||
|
|
f1455c7ce1 | ||
|
|
236e986827 | ||
|
|
7b7df01fc5 | ||
|
|
8f26aa0736 | ||
|
|
99b3f9261b | ||
|
|
8b5854febc | ||
|
|
252d4b4d4b | ||
|
|
885d50defc | ||
|
|
158054cf39 | ||
|
|
dbbf765e8e | ||
|
|
8abc2f49f5 | ||
|
|
98cd54f54f | ||
|
|
d2c94a0bc3 | ||
|
|
3805139cb0 | ||
|
|
b75f51103e | ||
|
|
d69a5119a2 | ||
|
|
0539c78741 | ||
|
|
6dd6433a51 | ||
|
|
0cc067c78e | ||
|
|
49ceab1d33 | ||
|
|
d0e8b271d3 | ||
|
|
d7ed7026d7 | ||
|
|
0623bbb9d3 | ||
|
|
2537a84e25 | ||
|
|
33f1deb8f1 | ||
|
|
efcf0691bf | ||
|
|
372f7ffc2f | ||
|
|
868fc75a83 | ||
|
|
9113369fd0 | ||
|
|
9db1622e5b | ||
|
|
9da0f625a0 | ||
|
|
9f0e3d9625 | ||
|
|
2605df0f9a | ||
|
|
9463a7af75 | ||
|
|
d6789562dc | ||
|
|
178a0c990c | ||
|
|
b619b4fa75 | ||
|
|
91690be6f9 | ||
|
|
712556bb2e | ||
|
|
59d89dfb57 | ||
|
|
261633c17c | ||
|
|
b3b78f588a | ||
|
|
75a83f28d0 | ||
|
|
fa1459eac2 | ||
|
|
db505172ee | ||
|
|
ed3e1f3f76 | ||
|
|
0fa176b680 | ||
|
|
4ea97b5141 | ||
|
|
92db7ffcbd | ||
|
|
b7404fb29a | ||
|
|
ac9e30ce05 | ||
|
|
9cf3603942 | ||
|
|
99af2834b1 | ||
|
|
e75931c8e5 | ||
|
|
c54f99b7a3 | ||
|
|
791e0ad411 | ||
|
|
e731b0f6eb | ||
|
|
f5cace3c65 | ||
|
|
67fbabe2d7 | ||
|
|
df8b47ab69 | ||
|
|
d5f84846c7 | ||
|
|
86f937752d | ||
|
|
4afa68539c | ||
|
|
c376ddf314 | ||
|
|
f24b9d6a7f | ||
|
|
2464d36c28 | ||
|
|
f4229899ec | ||
|
|
bb13ee9edb | ||
|
|
fd4aeb9724 | ||
|
|
bf97994096 | ||
|
|
c76818c65f | ||
|
|
7ca1c2a430 | ||
|
|
9b1b138fe1 | ||
|
|
2a9e3a109b | ||
|
|
9438f29867 | ||
|
|
efbe57f962 | ||
|
|
8ed84cb071 | ||
|
|
2ea52947f2 | ||
|
|
a833aa153b | ||
|
|
7f4a1cf26f | ||
|
|
31e0a5bc82 | ||
|
|
ba2570262c | ||
|
|
caa827c743 | ||
|
|
0ee95b654a | ||
|
|
16915b5248 | ||
|
|
763e453e7f | ||
|
|
be6105d9ad | ||
|
|
ed9646f0fc | ||
|
|
b80f145c96 | ||
|
|
4d0185a153 | ||
|
|
60a612f437 | ||
|
|
1a8f884737 | ||
|
|
18293ce419 | ||
|
|
617942d0f5 | ||
|
|
6ce5026c51 | ||
|
|
ae9dc4666b | ||
|
|
b1cf6c3964 | ||
|
|
3b3c7dae4d | ||
|
|
92e1bf376f | ||
|
|
7eae679074 | ||
|
|
35ffbfb8f4 | ||
|
|
f7557854de | ||
|
|
8bbd42f355 | ||
|
|
861b9824d5 | ||
|
|
1e4876bd4a | ||
|
|
16006dd0f0 | ||
|
|
6aa2cfbd2e | ||
|
|
3dd233513f | ||
|
|
e754d282d4 | ||
|
|
733636d53f | ||
|
|
32e7704aa8 | ||
|
|
1de8109abb | ||
|
|
dc18175db0 | ||
|
|
dfca944024 | ||
|
|
473b6f1a08 | ||
|
|
d26cc1658f | ||
|
|
5eeb7cd4ca | ||
|
|
d11f39a5d1 | ||
|
|
898fe9458f | ||
|
|
2516b5d636 | ||
|
|
e8042b0ae9 | ||
|
|
c7e791d5ff | ||
|
|
cc40b83919 | ||
|
|
0d20542509 | ||
|
|
498bc632da | ||
|
|
5210291d2d | ||
|
|
079e8487f3 | ||
|
|
9252ea96fe | ||
|
|
7336b7dc3a | ||
|
|
7d6d6a2555 | ||
|
|
f08a86e455 | ||
|
|
585ecf1299 | ||
|
|
9d0a4990e3 | ||
|
|
5ff8cbe27a | ||
|
|
b05161decd | ||
|
|
3836b3d92a | ||
|
|
4a9ad582b8 | ||
|
|
a3043fec09 | ||
|
|
1dd8200596 | ||
|
|
9ec348ccd6 | ||
|
|
565b073660 | ||
|
|
60e3a34dfb | ||
|
|
583d89989a | ||
|
|
68f8f6548a | ||
|
|
68f09b1629 | ||
|
|
819c8b5161 | ||
|
|
775681d4d5 | ||
|
|
e52e7e1268 | ||
|
|
1e75212f5e | ||
|
|
3ef765df7a | ||
|
|
dae258eade | ||
|
|
77d40d23c3 | ||
|
|
9d656921f0 | ||
|
|
6a753df728 | ||
|
|
cfcc3df00c | ||
|
|
6db990ca7d | ||
|
|
bca33b72a8 | ||
|
|
8552291f1a | ||
|
|
4f53b45fdb | ||
|
|
6a11cafd08 | ||
|
|
1e2e54e1fd | ||
|
|
0b669600ee | ||
|
|
dd487f9d9d | ||
|
|
23ef79c290 | ||
|
|
d4ec93f11f | ||
|
|
91bddc100e | ||
|
|
d1b640623e | ||
|
|
a01e3172f8 | ||
|
|
9f8dbce96b | ||
|
|
be1df0a981 | ||
|
|
e962fc1e62 | ||
|
|
dde5eda7fa | ||
|
|
69b4f968b7 | ||
|
|
6951328233 | ||
|
|
9f1446fea8 | ||
|
|
b01895aa73 | ||
|
|
e969fd32d7 | ||
|
|
dc9f74e73d | ||
|
|
5976a2c6ce | ||
|
|
f87d46c35e | ||
|
|
600a31c6cf | ||
|
|
0349fa7178 | ||
|
|
11026bdca5 | ||
|
|
aed2060788 | ||
|
|
67615e77bb | ||
|
|
cb958d93cd | ||
|
|
85c85079c6 | ||
|
|
580d5cc983 | ||
|
|
cd30f3c9f1 | ||
|
|
804a38af74 | ||
|
|
67e9ce031a | ||
|
|
06b22f56a8 | ||
|
|
9b17c57cab | ||
|
|
b813cc68ab | ||
|
|
7c41272e39 | ||
|
|
6c22f5ecd9 | ||
|
|
b4318273fa | ||
|
|
3aae379b24 | ||
|
|
a2b55e7d32 | ||
|
|
d649e97240 | ||
|
|
dd04c83c9e | ||
|
|
c05b0f99b3 | ||
|
|
5040383080 | ||
|
|
78af450129 | ||
|
|
bd67302ef3 | ||
|
|
1d4fd8d288 | ||
|
|
d574b46e3e | ||
|
|
edce4978d9 | ||
|
|
75cf64841b | ||
|
|
4838e89e7a | ||
|
|
99b09d42ea | ||
|
|
49d48a5960 | ||
|
|
d48ded5606 | ||
|
|
019922a90b | ||
|
|
4f6760e4d8 | ||
|
|
d2e448a1aa | ||
|
|
b326066d1a | ||
|
|
baceed9907 | ||
|
|
442a583886 | ||
|
|
66eca75e07 | ||
|
|
965d08789e | ||
|
|
9579a2418b | ||
|
|
d78c8daa8f | ||
|
|
22fc89ee7b | ||
|
|
3844bbc1ca | ||
|
|
fa54b61843 | ||
|
|
4830bbc45a | ||
|
|
7ccbcd2d8f | ||
|
|
56095d4910 | ||
|
|
b5efbff16b | ||
|
|
baaea2e8ed | ||
|
|
697eda1633 | ||
|
|
9be4505df8 | ||
|
|
81af63d31c | ||
|
|
d28db5732d | ||
|
|
6b77cecc29 | ||
|
|
1362505951 | ||
|
|
da072050a4 | ||
|
|
feccf0000c | ||
|
|
8d4611bdc2 | ||
|
|
7de3f0885c | ||
|
|
65895ca382 | ||
|
|
c168f35905 | ||
|
|
7435b0d3f7 | ||
|
|
fbf8fadf81 | ||
|
|
1f651ef573 | ||
|
|
bd07b605cd | ||
|
|
f2b0c89fc8 | ||
|
|
26630cef9d | ||
|
|
4722f992bb | ||
|
|
cbddd6d121 | ||
|
|
422a92b646 | ||
|
|
cda058e552 | ||
|
|
a88478c5d2 | ||
|
|
217c706bc9 | ||
|
|
594782a8a2 | ||
|
|
c42c0434ce | ||
|
|
199a6cd071 | ||
|
|
24a4b12578 | ||
|
|
f1bd34b2fb | ||
|
|
032ce0fb2a | ||
|
|
081a526670 | ||
|
|
ca60440d91 | ||
|
|
57b4266d7a | ||
|
|
e20d906fdf | ||
|
|
127e07e8ad | ||
|
|
f400d6d1b2 | ||
|
|
ae0a45f3b2 | ||
|
|
68a3d12a64 | ||
|
|
c6a4c7f5b0 | ||
|
|
a9606c9f1f | ||
|
|
7197f3fedf | ||
|
|
50c4dcec76 | ||
|
|
1e65dcd4dc | ||
|
|
125a4ab4ed | ||
|
|
4f41700817 | ||
|
|
48ee5e261b | ||
|
|
cc7a37eed7 | ||
|
|
bab50f35a1 | ||
|
|
cf60a4a402 | ||
|
|
0ce1ab9542 | ||
|
|
c69d4c54cb | ||
|
|
cd3c511a5a | ||
|
|
40bdc4b95e | ||
|
|
c994a3d926 | ||
|
|
41a63f4755 | ||
|
|
b2a4eab26f | ||
|
|
13a438187c | ||
|
|
ff9bfd799b | ||
|
|
fcd9d02b47 | ||
|
|
d996e9256f | ||
|
|
10382dd5b7 | ||
|
|
5ac7e6e7bb | ||
|
|
c3bbd71816 | ||
|
|
a8117b49cd | ||
|
|
9fdf539a55 | ||
|
|
5b39b51049 | ||
|
|
1f13d9ab13 | ||
|
|
85c85bf0de | ||
|
|
a1e28b733d | ||
|
|
5b5c6e1e24 | ||
|
|
3fa56eeb05 | ||
|
|
4572998ddc | ||
|
|
d09b2de661 | ||
|
|
cc4a84cfc1 | ||
|
|
f8fd0fd2db | ||
|
|
f8fcd8b146 | ||
|
|
09fcb696b2 | ||
|
|
ac42cd09f1 | ||
|
|
ee8e5365a2 | ||
|
|
55c411d900 | ||
|
|
27cebd93ae | ||
|
|
8029b1865c | ||
|
|
c534d50c2f | ||
|
|
4548f0c46c | ||
|
|
cf47b769e6 | ||
|
|
b06d3f5a99 | ||
|
|
a1ec67514b | ||
|
|
5d4c1d9775 | ||
|
|
d5375a6613 | ||
|
|
e6cac8b526 | ||
|
|
83a662a838 | ||
|
|
97d8600522 | ||
|
|
b61abf094c | ||
|
|
c6bff3afaf | ||
|
|
5e5b6929fd | ||
|
|
19650febcf | ||
|
|
3ec5a0c898 | ||
|
|
f4712a09fe | ||
|
|
da223d238a | ||
|
|
83894f769f | ||
|
|
bc222cffd7 | ||
|
|
93c7429e26 | ||
|
|
cb10b8cbc5 | ||
|
|
b7f6933ac5 | ||
|
|
304e974d04 | ||
|
|
cdf013c8b9 | ||
|
|
ddc3b46f38 | ||
|
|
36c918597c | ||
|
|
fa8a237118 | ||
|
|
018e881e1a | ||
|
|
9b0a0bc011 | ||
|
|
9ae31ff0aa | ||
|
|
5d5ffb1983 | ||
|
|
d44bcbbdf7 | ||
|
|
273c3d86c3 | ||
|
|
16c28afd40 | ||
|
|
6f44680bad | ||
|
|
c738fc7fd0 | ||
|
|
de7bf4213e | ||
|
|
0591b0ace9 | ||
|
|
8b61b2d4de | ||
|
|
c37deca8cb | ||
|
|
a524ba9cb1 | ||
|
|
ba10957fb4 | ||
|
|
f8c4651270 | ||
|
|
bd611d0966 | ||
|
|
5b8335e66a | ||
|
|
68736e3142 | ||
|
|
c8ee7fcd4f | ||
|
|
ee9a5b8205 | ||
|
|
3a5cc7e4df | ||
|
|
9b08260b6a | ||
|
|
b56ad469d9 | ||
|
|
f09c9c8225 | ||
|
|
4c349be745 | ||
|
|
b274ffc920 | ||
|
|
671bd91503 | ||
|
|
206265891f | ||
|
|
c736639d0a | ||
|
|
ea10860c4f | ||
|
|
e49248cd7b | ||
|
|
6789bbedae | ||
|
|
5df81e6da1 | ||
|
|
4668f1de8c | ||
|
|
c9fa71b3f9 | ||
|
|
ec6f46d865 | ||
|
|
cd725c61a9 | ||
|
|
100c628bba | ||
|
|
44143cfdad | ||
|
|
cae3f932ca | ||
|
|
9225f27463 | ||
|
|
a4f389916f | ||
|
|
2574709112 | ||
|
|
46f467d759 | ||
|
|
b883304c68 | ||
|
|
e41767058a | ||
|
|
4e36eafaf8 | ||
|
|
e9a40e3794 | ||
|
|
4433a18735 | ||
|
|
2a4f7d5d12 | ||
|
|
38939ac454 | ||
|
|
83d57f16b7 | ||
|
|
a9c2072127 | ||
|
|
bc794fb504 | ||
|
|
f31a0b0f11 | ||
|
|
29025f1262 | ||
|
|
fd9b297be6 | ||
|
|
04192fabd7 | ||
|
|
58741def84 | ||
|
|
4d8f7471de | ||
|
|
4c387d1ad9 | ||
|
|
3aa12f80ec | ||
|
|
6741b98f34 | ||
|
|
ef7ac7ac20 | ||
|
|
e7715718c6 | ||
|
|
a76d3a5d86 | ||
|
|
14d0cb8625 | ||
|
|
dff75d2442 | ||
|
|
b5b5f31f06 | ||
|
|
24c7ed2cee | ||
|
|
a36477cc91 | ||
|
|
005a78ed3c | ||
|
|
6e62fd789c | ||
|
|
323f78d11f | ||
|
|
a8f5f4501b | ||
|
|
5168f2faba | ||
|
|
d7455bfeff | ||
|
|
472f9b8998 | ||
|
|
ebba18a462 | ||
|
|
25edad331f | ||
|
|
75a40c1d0b | ||
|
|
8a04e90bdf | ||
|
|
92ceb23c99 | ||
|
|
65328e6b70 | ||
|
|
2f50f68c7e | ||
|
|
a0024d556f | ||
|
|
77fd115ae8 | ||
|
|
b3a96e56e8 | ||
|
|
1b6ef3773e | ||
|
|
358b0c2b3b | ||
|
|
9579127595 | ||
|
|
10c38653a5 | ||
|
|
5843929898 | ||
|
|
84a54fc827 | ||
|
|
7299f23a4f | ||
|
|
67d86af41a | ||
|
|
c694d05b02 | ||
|
|
7dcee95f04 | ||
|
|
8bf49bc7ef | ||
|
|
294e8440b4 | ||
|
|
7b7631da73 | ||
|
|
2447559c30 | ||
|
|
3b57280f7d | ||
|
|
086a36ca29 | ||
|
|
8e172dc6ae | ||
|
|
dd00a49815 | ||
|
|
d481756a59 | ||
|
|
b36755d2a3 | ||
|
|
0953a15171 | ||
|
|
e2fc45e9bb | ||
|
|
e616b06aa1 | ||
|
|
031962d67e | ||
|
|
b9ea5733b7 | ||
|
|
c117afa07e | ||
|
|
c93f42a67b | ||
|
|
3d9135ba70 | ||
|
|
ca155845d6 | ||
|
|
1ae18fde9c | ||
|
|
8ebe8c5de4 | ||
|
|
faefb69ef0 | ||
|
|
6683bb4bc8 | ||
|
|
56e45e2aba | ||
|
|
4401ef9782 | ||
|
|
eb6b96ba64 | ||
|
|
4d5af85abe | ||
|
|
8b3f320149 | ||
|
|
7bc9b2e98f | ||
|
|
106cac1c43 | ||
|
|
a705248cb6 | ||
|
|
2de0f6438b | ||
|
|
838b25efb4 | ||
|
|
57c4139309 | ||
|
|
0e62388de6 | ||
|
|
eda2f03253 | ||
|
|
2d62fd7df8 | ||
|
|
fd209f746e | ||
|
|
3fdd67e527 | ||
|
|
7cf7b883ac | ||
|
|
f4c5dde372 | ||
|
|
5680394b82 | ||
|
|
72dde7ded4 | ||
|
|
8e14e5ab3f | ||
|
|
595ad1f470 | ||
|
|
ac7834621b | ||
|
|
64502c81cf | ||
|
|
330f218514 | ||
|
|
35d1bf5273 | ||
|
|
a36f7eace0 | ||
|
|
3b09e669e7 | ||
|
|
ea3d2c7c66 | ||
|
|
ba91e8ca88 | ||
|
|
5ea25f5681 | ||
|
|
efec603685 | ||
|
|
2594d71819 | ||
|
|
c754a70511 | ||
|
|
08f67bab72 | ||
|
|
5a12ad600f | ||
|
|
b6c71f9499 | ||
|
|
60bbdc73aa | ||
|
|
fb8defaa4c | ||
|
|
40bb777e31 | ||
|
|
81698cb5fe | ||
|
|
ede5ceedc5 | ||
|
|
6948754600 | ||
|
|
16abd6b93e | ||
|
|
2e9b94f976 | ||
|
|
3625934f16 | ||
|
|
895e668a00 | ||
|
|
50848508d4 | ||
|
|
50c4c51f1a | ||
|
|
fe5585789b | ||
|
|
715d184bdc | ||
|
|
ec514ebbdb | ||
|
|
200504906d | ||
|
|
23f3b83edb | ||
|
|
53262b3d80 | ||
|
|
b6c74836b2 | ||
|
|
394a95a90e | ||
|
|
2b40063b46 | ||
|
|
b8e1311d99 | ||
|
|
fbb7119f5d | ||
|
|
855a3094ba | ||
|
|
5166b748d3 | ||
|
|
d7a7087655 | ||
|
|
b9db9b9198 | ||
|
|
48ab0cac22 | ||
|
|
5dbc0aa71a | ||
|
|
4428162b5b | ||
|
|
f6c0758ebd | ||
|
|
78eae3a710 | ||
|
|
71e933f247 | ||
|
|
4885d808a5 | ||
|
|
8aaa34ed7c | ||
|
|
41b1ae28b5 | ||
|
|
7e09355713 | ||
|
|
100aea0b28 | ||
|
|
9b48b61d45 | ||
|
|
d5a0ba0838 | ||
|
|
74ff82b5ae | ||
|
|
9fbb26b8e5 | ||
|
|
5eca1c809a | ||
|
|
b2d1b45afc | ||
|
|
c060c36ade | ||
|
|
b151329c5d | ||
|
|
4703717013 | ||
|
|
f7016a3960 | ||
|
|
a2b43a4b59 | ||
|
|
2d4e5a094b | ||
|
|
fe07a8e1d8 | ||
|
|
91f024e62d | ||
|
|
635c6274a1 | ||
|
|
a06cdb0e75 | ||
|
|
fc471d9860 | ||
|
|
d8a1be87c7 | ||
|
|
d5013742d9 | ||
|
|
b85f1ad0ef | ||
|
|
e59afee740 | ||
|
|
39e04def14 | ||
|
|
0a4ecf5072 | ||
|
|
001693179a | ||
|
|
a66da90890 | ||
|
|
f89012d2c5 | ||
|
|
b67a7ae68e | ||
|
|
ba5f38c955 | ||
|
|
d4c9843315 | ||
|
|
caf646835e | ||
|
|
5840f68992 | ||
|
|
70c4f8b058 | ||
|
|
8fb27a3a33 | ||
|
|
35503ba753 | ||
|
|
9d049bcc38 | ||
|
|
6e5c2fdc95 | ||
|
|
ff340b84b0 | ||
|
|
44b5bc85b4 | ||
|
|
68ef48051d | ||
|
|
3b62564894 | ||
|
|
af97c4a809 | ||
|
|
df4ef7d4fb | ||
|
|
263dc519ef | ||
|
|
13280dd48c | ||
|
|
97cfc311b5 | ||
|
|
7e28d59d55 | ||
|
|
376feb01a3 | ||
|
|
2b73b6850b | ||
|
|
b4d53a51ec | ||
|
|
d7e14359c6 | ||
|
|
11320a36d7 | ||
|
|
cd52dcaaf5 | ||
|
|
fdf2f68ab3 | ||
|
|
950c1d741e | ||
|
|
1dc547e672 | ||
|
|
d67bc27d13 | ||
|
|
a334084bb7 | ||
|
|
33916fc72f | ||
|
|
fc91dcd94b | ||
|
|
1d99f416e7 | ||
|
|
0ba4e4aba8 | ||
|
|
0b99ea642a | ||
|
|
7b7fd603a1 | ||
|
|
8627a2e8c8 | ||
|
|
eb4119f60c | ||
|
|
483734de23 | ||
|
|
6c2b4c15b3 | ||
|
|
1a72257df5 | ||
|
|
7d6550376e | ||
|
|
8630b6e689 | ||
|
|
6f9f62d9b0 | ||
|
|
e8953b81f5 | ||
|
|
b4a9a695e7 | ||
|
|
1270eecea3 | ||
|
|
623d764b52 | ||
|
|
43b6d344d4 | ||
|
|
094b05948a | ||
|
|
2e3c60f41b | ||
|
|
11f6257b24 | ||
|
|
7141488b7d | ||
|
|
b156d4d722 | ||
|
|
427bdae97f | ||
|
|
8cec5cb8a0 | ||
|
|
201eaea28d | ||
|
|
961e3c77a3 | ||
|
|
8b7061f585 | ||
|
|
f9a4352356 | ||
|
|
a4563fa30b | ||
|
|
bd336c1269 | ||
|
|
1925c59a86 | ||
|
|
fc535b19e2 | ||
|
|
dcbff69333 | ||
|
|
2166221a0f | ||
|
|
fff3e1ab20 | ||
|
|
223f22405e | ||
|
|
99d044aa9f | ||
|
|
771a3a6e1d | ||
|
|
aadffef368 | ||
|
|
ae8571d68b | ||
|
|
7252cbf38a | ||
|
|
e55d6178b6 | ||
|
|
ffa06550e4 | ||
|
|
22c32e72f1 | ||
|
|
7f1ac610f8 | ||
|
|
ee363704af | ||
|
|
05ff81c408 | ||
|
|
d6e9e50da4 | ||
|
|
3629c3e702 | ||
|
|
6fdaaed764 | ||
|
|
d8305b8d55 | ||
|
|
bcf29d539a | ||
|
|
c210834471 | ||
|
|
50bbffd467 | ||
|
|
b90620caeb | ||
|
|
9d18a256ce | ||
|
|
8294440e0a | ||
|
|
20f317eb5b |
@@ -1,14 +0,0 @@
|
||||
version = 1
|
||||
|
||||
test_patterns = [
|
||||
'test/**',
|
||||
]
|
||||
|
||||
exclude_patterns = [
|
||||
'docs/**',
|
||||
'util.py', # not part of the core sqlfluff code
|
||||
]
|
||||
|
||||
[[ analyzers ]]
|
||||
name = 'python'
|
||||
enabled = true
|
||||
@@ -20,6 +20,9 @@ indent_style = unset
|
||||
|
||||
# Some specific tests with trailing newlines
|
||||
# If adding any exceptions here, make sure to add them to .pre-commit-config.yaml as well
|
||||
[test/fixtures/templater/jinja_l_metas/0{01,03,04,05,07,08,11}.sql]
|
||||
indent_style = unset
|
||||
insert_final_newline = unset
|
||||
[test/fixtures/linter/sqlfluffignore/*/*.sql]
|
||||
indent_style = unset
|
||||
trim_trailing_whitespace = unset
|
||||
@@ -32,6 +35,9 @@ insert_final_newline = unset
|
||||
indent_style = unset
|
||||
trim_trailing_whitespace = unset
|
||||
insert_final_newline = unset
|
||||
[plugins/sqlfluff-templater-dbt/test/fixtures/dbt/dbt_project/models/my_new_project/multiple_trailing_newline.sql]
|
||||
indent_style = unset
|
||||
insert_final_newline = unset
|
||||
[plugins/sqlfluff-templater-dbt/test/fixtures/dbt/templated_output/macro_in_macro.sql]
|
||||
indent_style = unset
|
||||
trim_trailing_whitespace = unset
|
||||
|
||||
25
.github/codecov.yml
vendored
25
.github/codecov.yml
vendored
@@ -1,25 +0,0 @@
|
||||
# Configuration file for codecov.io
|
||||
# https://docs.codecov.io/docs/codecovyml-reference
|
||||
|
||||
codecov:
|
||||
branch: main
|
||||
bot: "codecov-io"
|
||||
ci:
|
||||
- "github.com"
|
||||
max_report_age: 24
|
||||
disable_default_path_fixes: no # yamllint disable-line rule:truthy
|
||||
require_ci_to_pass: yes # yamllint disable-line rule:truthy
|
||||
notify:
|
||||
after_n_builds: 3
|
||||
wait_for_ci: yes # yamllint disable-line rule:truthy
|
||||
|
||||
coverage:
|
||||
precision: 2
|
||||
round: down
|
||||
range: "70...100"
|
||||
status:
|
||||
project:
|
||||
default:
|
||||
# basic
|
||||
target: auto
|
||||
threshold: 0%
|
||||
18
.github/labeler.yml
vendored
18
.github/labeler.yml
vendored
@@ -28,6 +28,15 @@ greenplum:
|
||||
hive:
|
||||
- "/(hive)/i"
|
||||
|
||||
impala:
|
||||
- "/(impala)/i"
|
||||
|
||||
mariadb:
|
||||
- "/(mariadb)/i"
|
||||
|
||||
materialize:
|
||||
- "/(materialize)/i"
|
||||
|
||||
mysql:
|
||||
- "/(mysql)/i"
|
||||
|
||||
@@ -52,8 +61,17 @@ sparksql:
|
||||
sqlite:
|
||||
- "/(sqlite)/i"
|
||||
|
||||
starrocks:
|
||||
- "/(starrocks)/i"
|
||||
|
||||
t-sql:
|
||||
- "/(t-sql|tsql)/i"
|
||||
|
||||
teradata:
|
||||
- "/(teradata)/i"
|
||||
|
||||
trino:
|
||||
- "/(trino)/i"
|
||||
|
||||
vertica:
|
||||
- "/(vertica)/i"
|
||||
|
||||
7
.github/workflows/add-to-release-notes.yml
vendored
7
.github/workflows/add-to-release-notes.yml
vendored
@@ -7,11 +7,16 @@ on:
|
||||
branches:
|
||||
- main
|
||||
jobs:
|
||||
release-notes:
|
||||
draft-release:
|
||||
runs-on: ubuntu-20.04
|
||||
if: github.repository == 'sqlfluff/sqlfluff'
|
||||
steps:
|
||||
- name: Update release notes
|
||||
uses: release-drafter/release-drafter@v5
|
||||
# If it's a release PR, then we should also update
|
||||
# the header and title here too.
|
||||
# with:
|
||||
# name: ${{ }}
|
||||
# header: ${{ }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
107
.github/workflows/ci-pr-comments.yml
vendored
Normal file
107
.github/workflows/ci-pr-comments.yml
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
# This Workflow runs in a more secure context and comments
|
||||
# on pull requests.
|
||||
# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
|
||||
name: Comment on the pull request
|
||||
|
||||
# Run on completion of the CI job.
|
||||
# This workflow has access to write comments on PRs event when
|
||||
# that PR is triggered by a forked repo.
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
comment-on-pr:
|
||||
runs-on: ubuntu-latest
|
||||
if: >
|
||||
github.event.workflow_run.event == 'pull_request'
|
||||
steps:
|
||||
- name: 'Download txt artifact'
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
run_id: ${{github.event.workflow_run.id }},
|
||||
});
|
||||
const matchArtifact = artifacts.data.artifacts.filter((artifact) => {
|
||||
return artifact.name == "txt-report"
|
||||
})[0];
|
||||
const download = await github.rest.actions.downloadArtifact({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
artifact_id: matchArtifact.id,
|
||||
archive_format: 'zip',
|
||||
});
|
||||
var fs = require('fs');
|
||||
fs.writeFileSync('${{github.workspace}}/cov-report.zip', Buffer.from(download.data));
|
||||
|
||||
- name: Unzip Downloaded Artifact
|
||||
run: unzip cov-report.zip
|
||||
|
||||
- name: Update PR comment with coverage report.
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
// First list the existing comments
|
||||
const trigger_str = 'Coverage Results';
|
||||
console.log("Getting existing comments...");
|
||||
|
||||
const { promises: fs } = require('fs');
|
||||
const issue_number = await fs.readFile('pr-number.txt', 'utf8');
|
||||
console.log("Issue number: " + issue_number);
|
||||
|
||||
const comments = await github.paginate(
|
||||
github.rest.issues.listComments,
|
||||
{
|
||||
owner: 'sqlfluff',
|
||||
repo: 'sqlfluff',
|
||||
issue_number: Number(issue_number)
|
||||
}
|
||||
);
|
||||
|
||||
let comment_id = null;
|
||||
console.log("Got %d comments", comments.length);
|
||||
|
||||
comments.forEach(comment => {
|
||||
if (comment.body.indexOf(trigger_str) >= 0) {
|
||||
console.log("Found target comment ID: %d", comment.id);
|
||||
comment_id = comment.id;
|
||||
} else {
|
||||
console.log("Comment ID %d not valid with body:\n%s.", comment.id, comment.body);
|
||||
}
|
||||
});
|
||||
|
||||
const previous_outcome = await fs.readFile('outcome.txt', 'utf8');
|
||||
console.log("Previous coverage step outcome: %s", previous_outcome);
|
||||
if (previous_outcome == "success\n") {
|
||||
status_emoji = "✅";
|
||||
} else {
|
||||
status_emoji = "⚠️";
|
||||
}
|
||||
|
||||
const content = await fs.readFile('coverage-report.txt', 'utf8');
|
||||
body = "# " + trigger_str + " " + status_emoji + "\n```\n" + content + "\n```\n";
|
||||
|
||||
if (comment_id > 0) {
|
||||
console.log("Updating comment id: %d", comment_id);
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: comment_id,
|
||||
body: body
|
||||
});
|
||||
} else {
|
||||
console.log("No existing comment matched, creating a new one...");
|
||||
await github.rest.issues.createComment({
|
||||
issue_number: Number(issue_number),
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body: body
|
||||
});
|
||||
}
|
||||
24
.github/workflows/ci-test-dbt.yml
vendored
24
.github/workflows/ci-test-dbt.yml
vendored
@@ -32,6 +32,8 @@ jobs:
|
||||
modular-python-test:
|
||||
name: py${{ inputs.python-version }}-${{ inputs.dbt-version }}
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
|
||||
services:
|
||||
# Label used to access the service container
|
||||
@@ -52,12 +54,16 @@ jobs:
|
||||
- 5432:5432
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
cache: 'pip'
|
||||
cache-dependency-path: |
|
||||
setup.cfg
|
||||
requirements_dev.txt
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install tox
|
||||
@@ -70,19 +76,11 @@ jobs:
|
||||
if: ${{ !inputs.coverage }}
|
||||
run: tox -e ${{ inputs.dbt-version }} -- plugins/sqlfluff-templater-dbt
|
||||
|
||||
- name: Coveralls Parallel (coveralls)
|
||||
uses: coverallsapp/github-action@master
|
||||
if: ${{ inputs.coverage }}
|
||||
with:
|
||||
path-to-lcov: coverage.lcov
|
||||
github-token: ${{ secrets.gh_token }}
|
||||
flag-name: run-${{ inputs.dbt-version }}
|
||||
parallel: true
|
||||
|
||||
- name: Upload coverage data (github)
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
if: ${{ inputs.coverage }}
|
||||
with:
|
||||
name: coverage-data
|
||||
name: coverage-data-py${{ inputs.python-version }}-${{ inputs.dbt-version }}
|
||||
path: ".coverage.*"
|
||||
if-no-files-found: ignore
|
||||
include-hidden-files: true
|
||||
|
||||
22
.github/workflows/ci-test-python.yml
vendored
22
.github/workflows/ci-test-python.yml
vendored
@@ -34,12 +34,16 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
name: py${{ inputs.python-version }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
cache: 'pip'
|
||||
cache-dependency-path: |
|
||||
setup.cfg
|
||||
requirements_dev.txt
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install tox
|
||||
@@ -76,19 +80,11 @@ jobs:
|
||||
echo "COVSUFFIX=$COVSUFFIX" >> $GITHUB_OUTPUT
|
||||
for file in .coverage.*; do mv "$file" "$file.$COVSUFFIX"; done;
|
||||
|
||||
- name: Coveralls Parallel (coveralls)
|
||||
uses: coverallsapp/github-action@master
|
||||
if: ${{ inputs.coverage }}
|
||||
with:
|
||||
path-to-lcov: coverage.lcov
|
||||
github-token: ${{ secrets.gh_token }}
|
||||
flag-name: run-${{ inputs.python-version }}-${{ steps.cov_suffix.outputs.COVSUFFIX }}
|
||||
parallel: true
|
||||
|
||||
- name: Upload coverage data (github)
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
if: ${{ inputs.coverage }}
|
||||
with:
|
||||
name: coverage-data
|
||||
name: coverage-data-py${{ inputs.python-version }}-${{ inputs.marks }}
|
||||
path: ".coverage.*"
|
||||
if-no-files-found: ignore
|
||||
include-hidden-files: true
|
||||
|
||||
310
.github/workflows/ci-tests.yml
vendored
310
.github/workflows/ci-tests.yml
vendored
@@ -8,11 +8,14 @@
|
||||
# (images, markdown files, )
|
||||
#
|
||||
name: CI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# 2am each night
|
||||
- cron: '00 2 * * *'
|
||||
# Don't use pull_request_target here. See:
|
||||
# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
@@ -29,175 +32,143 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
jobs: [ 'linting', 'doclinting', 'docbuild', 'yamllint' ]
|
||||
name: ${{ matrix.jobs }} tests
|
||||
job:
|
||||
[
|
||||
"linting",
|
||||
"doclinting",
|
||||
"docbuild",
|
||||
"yamllint",
|
||||
"mypy",
|
||||
"mypyc",
|
||||
"doctests",
|
||||
]
|
||||
include:
|
||||
# Default to most recent python version
|
||||
- python-version: "3.13"
|
||||
# As at 2024-10-10, docbuild fails on 3.13, so fall back to 3.12
|
||||
- job: docbuild
|
||||
python-version: "3.12"
|
||||
name: ${{ matrix.job }} tests
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
run: pip install tox
|
||||
- name: Run the tests
|
||||
run: tox -e ${{ matrix.jobs }}
|
||||
run: tox -e ${{ matrix.job }}
|
||||
|
||||
# Test with coverage tracking on most recent python (py11).
|
||||
python-version-tests-cov:
|
||||
name: Python 3.11 Tests
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
uses: ./.github/workflows/ci-test-python.yml
|
||||
with:
|
||||
python-version: "3.11"
|
||||
coverage: true
|
||||
secrets:
|
||||
gh_token: ${{ secrets.github_token }}
|
||||
|
||||
# Test without coverage tracking on older python versions.
|
||||
# This saves time, as testing without coverage tracking is faster.
|
||||
python-version-tests-nocov:
|
||||
name: Python ${{ matrix.python-version }} Tests
|
||||
# Test with coverage tracking on most recent python (py313).
|
||||
python-version-tests:
|
||||
name: Python Tests
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [ '3.7', '3.8', '3.9', '3.10' ]
|
||||
python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12", "3.13" ]
|
||||
include:
|
||||
# Default to test without coverage tracking on older python versions.
|
||||
# This saves time, as testing without coverage tracking is faster.
|
||||
- coverage: false
|
||||
# Override coverage to be true for most recent python version.
|
||||
- python-version: "3.13"
|
||||
coverage: true
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
uses: ./.github/workflows/ci-test-python.yml
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
coverage: ${{ matrix.coverage }}
|
||||
secrets:
|
||||
gh_token: ${{ secrets.github_token }}
|
||||
|
||||
dbt-tests-cov:
|
||||
name: dbt ${{ matrix.dbt-version }} Plugin Tests
|
||||
dbt-tests:
|
||||
name: dbt Plugin Tests
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
dbt-version: [ "dbt110", "dbt130", "dbt140", "dbt150" ]
|
||||
dbt-version:
|
||||
- dbt140
|
||||
- dbt150
|
||||
- dbt160
|
||||
- dbt170
|
||||
- dbt180
|
||||
- dbt190
|
||||
include:
|
||||
# Default to python 3.12 for dbt tests.
|
||||
# * Python 3.13 not supported yet.
|
||||
# * Looks like it's due to psycopg2 support as of 2024-10-10
|
||||
- python-version: "3.12"
|
||||
# For dbt 1.4 - 1.6 override to python 3.11
|
||||
- dbt-version: dbt140
|
||||
python-version: "3.11"
|
||||
- dbt-version: dbt150
|
||||
python-version: "3.11"
|
||||
- dbt-version: dbt160
|
||||
python-version: "3.11"
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
uses: ./.github/workflows/ci-test-dbt.yml
|
||||
with:
|
||||
python-version: "3.10"
|
||||
dbt-version: "${{ matrix.dbt-version }}"
|
||||
python-version: ${{ matrix.python-version }}
|
||||
dbt-version: ${{ matrix.dbt-version }}
|
||||
coverage: true
|
||||
secrets:
|
||||
gh_token: ${{ secrets.github_token }}
|
||||
|
||||
dbt-tests-nocov:
|
||||
name: dbt ${{ matrix.dbt-version }} Plugin Tests
|
||||
dialect-tests:
|
||||
name: Dialect ${{ matrix.marks }}
|
||||
strategy:
|
||||
matrix:
|
||||
dbt-version: [ "dbt120" ]
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
uses: ./.github/workflows/ci-test-dbt.yml
|
||||
with:
|
||||
python-version: "3.9"
|
||||
dbt-version: "${{ matrix.dbt-version }}"
|
||||
secrets:
|
||||
gh_token: ${{ secrets.github_token }}
|
||||
|
||||
# This runs the bulk of the dialect _parsing_ tests.
|
||||
#
|
||||
# It's run as a separate job as takes longer than the CI jobs and allows
|
||||
# them to be rerun separately if GitHub Actions or Coverage is experiencing
|
||||
# issues.
|
||||
dialects_parse:
|
||||
name: Dialect parsing
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [ '3.11' ]
|
||||
include:
|
||||
# This runs the bulk of the dialect _parsing_ tests.
|
||||
#
|
||||
# It's run as a separate job as takes longer than the CI jobs and allows
|
||||
# them to be rerun separately if GitHub Actions or Coverage is experiencing
|
||||
# issues.
|
||||
- marks: "parse_suite"
|
||||
# We test coverage here for some parsing routines.
|
||||
coverage: true
|
||||
# This lints all our dialect fixtures to check rules can handle a variety
|
||||
# of SQL and don't error out badly.
|
||||
#
|
||||
# It's run as a separate job as takes longer than the CI jobs and allows
|
||||
# them to be rerun separately if GitHub Actions or Coverage is experiencing
|
||||
# issues.
|
||||
- marks: "fix_suite"
|
||||
coverage: false
|
||||
# This lints all our rules fixtures to check rules.
|
||||
#
|
||||
# It's run as a separate job as takes longer than the CI jobs and allows
|
||||
# them to be rerun separately if GitHub Actions or Coverage is experiencing
|
||||
# issues.
|
||||
- marks: "rules_suite"
|
||||
coverage: true
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
uses: ./.github/workflows/ci-test-python.yml
|
||||
with:
|
||||
python-version: "3.11"
|
||||
marks: "parse_suite"
|
||||
# We test coverage here for some parsing routines.
|
||||
coverage: true
|
||||
python-version: "3.13"
|
||||
marks: ${{ matrix.marks }}
|
||||
coverage: ${{ matrix.coverage }}
|
||||
secrets:
|
||||
gh_token: ${{ secrets.github_token }}
|
||||
|
||||
# This lints all our dialect fixtures to check rules can handle a variety
|
||||
# of SQL and don't error out badly.
|
||||
#
|
||||
# It's run as a separate job as takes longer than the CI jobs and allows
|
||||
# them to be rerun separately if GitHub Actions or Coverage is experiencing
|
||||
# issues.
|
||||
dialects_fix:
|
||||
name: Dialect fixing
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [ '3.11' ]
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
uses: ./.github/workflows/ci-test-python.yml
|
||||
with:
|
||||
python-version: "3.11"
|
||||
marks: "fix_suite"
|
||||
secrets:
|
||||
gh_token: ${{ secrets.github_token }}
|
||||
|
||||
# This lints all our rules fixtures to check rules.
|
||||
#
|
||||
# It's run as a separate job as takes longer than the CI jobs and allows
|
||||
# them to be rerun separately if GitHub Actions or Coverage is experiencing
|
||||
# issues.
|
||||
rules:
|
||||
name: Rule yaml test cases
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [ '3.11' ]
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
uses: ./.github/workflows/ci-test-python.yml
|
||||
with:
|
||||
python-version: "3.11"
|
||||
marks: "rules_suite"
|
||||
coverage: true
|
||||
secrets:
|
||||
gh_token: ${{ secrets.github_token }}
|
||||
|
||||
other-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
jobs: [ 'bench', 'mypy', 'doctests' ]
|
||||
name: ${{ matrix.jobs }} tests
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install dependencies
|
||||
run: pip install tox
|
||||
- name: Run the tests
|
||||
env:
|
||||
SQLFLUFF_BENCHMARK_API_KEY: ${{ secrets.SQLFLUFF_BENCHMARK_API_KEY }}
|
||||
run: |
|
||||
tox -e ${{ matrix.jobs }}
|
||||
|
||||
ymlchecks:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python 3.11
|
||||
uses: actions/setup-python@v4
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version: '3.13'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install -r requirements.txt -r requirements_dev.txt
|
||||
python setup.py develop
|
||||
pip install -r requirements_dev.txt
|
||||
pip install -e .
|
||||
- name: Generate the YAML files
|
||||
run: |
|
||||
python test/generate_parse_fixture_yml.py
|
||||
@@ -215,11 +186,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
name: example tests
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version: '3.13'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install -e .
|
||||
@@ -234,17 +205,17 @@ jobs:
|
||||
|
||||
python-windows-tests:
|
||||
runs-on: windows-latest
|
||||
name: Python 3.10 Windows tests
|
||||
name: Python 3.13 Windows tests
|
||||
steps:
|
||||
- name: Set git to use LF
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: "3.13"
|
||||
- name: List Env
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -261,15 +232,16 @@ jobs:
|
||||
mkdir temp_pytest
|
||||
python -m tox -e winpy -- --cov=sqlfluff -n 2 test -m "not integration"
|
||||
- name: Upload coverage data (github)
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: coverage-data
|
||||
name: coverage-data-winpy3.13
|
||||
path: ".coverage.*"
|
||||
if-no-files-found: ignore
|
||||
include-hidden-files: true
|
||||
|
||||
python-windows-dbt-tests:
|
||||
runs-on: windows-latest
|
||||
name: DBT Plugin Python 3.10 Windows tests
|
||||
name: dbt Plugin Python 3.12 Windows tests
|
||||
steps:
|
||||
- name: Start PostgreSQL on Windows
|
||||
run: |
|
||||
@@ -283,11 +255,12 @@ jobs:
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
# NOTE: As of 2024-10-10, dbt does not yet support python 3.13.
|
||||
python-version: "3.12"
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: pip install tox
|
||||
@@ -296,7 +269,7 @@ jobs:
|
||||
# Do not set explicitly temp dir for dbt as causes problems
|
||||
# None of these test need temp dir set
|
||||
run: |
|
||||
python -m tox -e dbt150-winpy -- plugins/sqlfluff-templater-dbt
|
||||
python -m tox -e dbt180-winpy -- plugins/sqlfluff-templater-dbt
|
||||
|
||||
pip-test-pull-request:
|
||||
# Test that using pip install works as we've missed
|
||||
@@ -306,10 +279,10 @@ jobs:
|
||||
name: pip install tests
|
||||
steps:
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- uses: actions/checkout@v3
|
||||
python-version: "3.13"
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install .
|
||||
@@ -329,43 +302,58 @@ jobs:
|
||||
run: |
|
||||
sqlfluff lint --dialect=ansi <(echo "select 1")
|
||||
|
||||
coveralls_finish:
|
||||
name: Finalise coveralls.
|
||||
needs: [python-version-tests-cov, dbt-tests-cov, python-windows-tests, dialects_parse, rules]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Coveralls Finished
|
||||
uses: coverallsapp/github-action@master
|
||||
with:
|
||||
github-token: ${{ secrets.github_token }}
|
||||
parallel-finished: true
|
||||
|
||||
coverage_check:
|
||||
name: Combine & check 100% coverage.
|
||||
runs-on: ubuntu-latest
|
||||
needs: [python-version-tests-cov, dbt-tests-cov, python-windows-tests, dialects_parse, rules]
|
||||
needs: [python-version-tests, dbt-tests, python-windows-tests, dialect-tests]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.10"
|
||||
python-version: "3.13"
|
||||
|
||||
- run: python -m pip install --upgrade coverage[toml]
|
||||
|
||||
- name: Download coverage data.
|
||||
uses: actions/download-artifact@v3
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: coverage-data
|
||||
pattern: coverage-data-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Combine coverage & fail if it's <100%.
|
||||
id: report_coverage
|
||||
# NOTE: Setting the pipefail option here means that even when
|
||||
# piping the output to `tee`, we still get the exit code of the
|
||||
# `coverage report` command.
|
||||
run: |
|
||||
set -o pipefail
|
||||
python -m coverage combine
|
||||
python -m coverage html --skip-covered --skip-empty
|
||||
python -m coverage report --fail-under=100
|
||||
python -m coverage report --fail-under=100 --skip-covered --skip-empty -m | tee coverage-report.txt
|
||||
|
||||
- name: Upload HTML report if check failed.
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: html-report
|
||||
path: htmlcov
|
||||
if: ${{ failure() }}
|
||||
if: failure() && github.event_name == 'pull_request'
|
||||
|
||||
- name: Stash PR Number.
|
||||
if: always() && github.event_name == 'pull_request'
|
||||
# NOTE: We do this so we know what PR to comment on when we pick up the report.
|
||||
run: |
|
||||
echo ${{ github.event.number }} > ./pr-number.txt
|
||||
echo ${{ steps.report_coverage.outcome }} > ./outcome.txt
|
||||
|
||||
- name: Upload TXT report always (to add as comment to PR).
|
||||
# NOTE: We don't actually comment on the PR from here, we'll do that in
|
||||
# a more secure way by triggering a more secure workflow.
|
||||
# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: txt-report
|
||||
path: |
|
||||
coverage-report.txt
|
||||
pr-number.txt
|
||||
outcome.txt
|
||||
if: always() && github.event_name == 'pull_request'
|
||||
|
||||
@@ -11,7 +11,7 @@ jobs:
|
||||
run:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Abort if branch already exists
|
||||
run: |
|
||||
_check_branch=$(git ls-remote --heads origin prep-${{ github.event.inputs.newVersionNumber }})
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
@@ -55,4 +55,17 @@ jobs:
|
||||
|
||||
[1]: https://github.com/sqlfluff/sqlfluff/releases
|
||||
[2]: https://github.com/peter-evans/create-pull-request
|
||||
labels: release
|
||||
labels: |
|
||||
release
|
||||
skip-changelog
|
||||
|
||||
- name: Update release title and tag
|
||||
uses: release-drafter/release-drafter@v5
|
||||
with:
|
||||
# NOTE: We should eventually actually populate the date here, but that
|
||||
# will most likely change before the new pull request actually gets
|
||||
# merged, so we just add "YYYY-MM-DD" for now as a placeholder.
|
||||
name: "[${{ github.event.inputs.newVersionNumber }}] - YYYY-MM-DD"
|
||||
tag: ${{ github.event.inputs.newVersionNumber }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
53
.github/workflows/pre-commit.yml
vendored
Normal file
53
.github/workflows/pre-commit.yml
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
---
|
||||
name: pre-commit
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
jobs:
|
||||
pre-commit:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
RAW_LOG: pre-commit.log
|
||||
CS_XML: pre-commit.xml
|
||||
SKIP: no-commit-to-branch
|
||||
steps:
|
||||
- run: sudo apt-get update && sudo apt-get install cppcheck
|
||||
if: false
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
if: false
|
||||
with:
|
||||
cache: pip
|
||||
python-version: 3.12.1
|
||||
- run: python -m pip install pre-commit
|
||||
- uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ~/.cache/pre-commit/
|
||||
key: pre-commit-4|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml')
|
||||
}}
|
||||
- name: Run pre-commit hooks
|
||||
run: |
|
||||
set -o pipefail
|
||||
pre-commit gc
|
||||
pre-commit run --show-diff-on-failure --color=always --all-files | tee ${RAW_LOG}
|
||||
- name: Convert Raw Log to Checkstyle format (launch action)
|
||||
uses: mdeweerd/logToCheckStyle@v2024.3.5
|
||||
if: ${{ failure() }}
|
||||
with:
|
||||
in: ${{ env.RAW_LOG }}
|
||||
out: ${{ env.CS_XML }}
|
||||
- uses: actions/cache/save@v4
|
||||
if: ${{ ! cancelled() }}
|
||||
with:
|
||||
path: ~/.cache/pre-commit/
|
||||
key: pre-commit-4|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml')
|
||||
}}
|
||||
- name: Provide log as artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
if: ${{ ! cancelled() }}
|
||||
with:
|
||||
name: precommit-logs
|
||||
path: |
|
||||
${{ env.RAW_LOG }}
|
||||
${{ env.CS_XML }}
|
||||
retention-days: 2
|
||||
@@ -10,10 +10,10 @@ jobs:
|
||||
run:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.7"
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
@@ -28,8 +28,8 @@ jobs:
|
||||
run: cp -r plugins/sqlfluff-templater-dbt/dist/. dist/
|
||||
|
||||
- name: Publish Python distribution to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@master
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.PYPI_DBT_TEMPLATER_TOKEN }}
|
||||
skip_existing: true
|
||||
skip-existing: true
|
||||
|
||||
@@ -25,15 +25,15 @@ jobs:
|
||||
# Setup QEMU and Buildx to allow for multi-platform builds.
|
||||
- name: Set up QEMU
|
||||
id: docker_qemu
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
id: docker_buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# Authenticate with DockerHub.
|
||||
- name: Login to DockerHub
|
||||
id: docker_login
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
# Build amd64 image to use in the integration test.
|
||||
- name: Build and export to Docker
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
load: true
|
||||
tags: ${{ env.TEST_TAG }}
|
||||
@@ -59,7 +59,7 @@ jobs:
|
||||
# N.B. We tag this image as both latest and with its version number.
|
||||
- name: Build and push
|
||||
id: docker_build_push
|
||||
uses: docker/build-push-action@v2
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
@@ -10,10 +10,10 @@ jobs:
|
||||
run:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.7"
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
@@ -23,8 +23,8 @@ jobs:
|
||||
run: tox -e build-dist
|
||||
|
||||
- name: Publish Python distribution to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@master
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.PYPI_TOKEN }}
|
||||
skip_existing: true
|
||||
skip-existing: true
|
||||
|
||||
8
.gitignore
vendored
8
.gitignore
vendored
@@ -13,6 +13,7 @@ build
|
||||
_build
|
||||
dist
|
||||
.pytest_cache
|
||||
/sqlfluff-*
|
||||
|
||||
# Ignore the Environment
|
||||
env
|
||||
@@ -54,3 +55,10 @@ plugins/sqlfluff-templater-dbt/test/fixtures/dbt/dbt_project/packages.yml
|
||||
# VSCode
|
||||
.vscode
|
||||
*.code-workspace
|
||||
|
||||
# Emacs
|
||||
*~
|
||||
|
||||
# Mypyc outputs
|
||||
*.pyd
|
||||
*.so
|
||||
|
||||
@@ -1,16 +1,23 @@
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.1.0
|
||||
rev: v4.5.0
|
||||
hooks:
|
||||
- id: no-commit-to-branch
|
||||
args: [--branch, main]
|
||||
# If adding any exceptions here, make sure to add them to .editorconfig as well
|
||||
- id: end-of-file-fixer
|
||||
exclude: |
|
||||
(?x)^(
|
||||
test/fixtures/linter/sqlfluffignore/|
|
||||
test/fixtures/config/inheritance_b/example.sql|
|
||||
test/fixtures/config/inheritance_b/nested/example.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt/templated_output/trailing_newlines.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt/dbt_project/models/my_new_project/trailing_newlines.sql
|
||||
(?x)^
|
||||
(
|
||||
test/fixtures/templater/jinja_l_metas/0(0[134578]|11).sql|
|
||||
test/fixtures/linter/sqlfluffignore/[^/]*/[^/]*.sql|
|
||||
test/fixtures/config/inheritance_b/(nested/)?example.sql|
|
||||
(.*)/trailing_newlines.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt.*/dbt_project/models/my_new_project/multiple_trailing_newline.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt.*/templated_output/macro_in_macro.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt.*/templated_output/(dbt_utils_0.8.0/)?last_day.sql|
|
||||
test/fixtures/linter/indentation_errors.sql|
|
||||
test/fixtures/templater/jinja_d_roundtrip/test.sql
|
||||
)$
|
||||
- id: trailing-whitespace
|
||||
exclude: |
|
||||
@@ -19,49 +26,70 @@ repos:
|
||||
test/fixtures/templater/jinja_d_roundtrip/test.sql|
|
||||
test/fixtures/config/inheritance_b/example.sql|
|
||||
test/fixtures/config/inheritance_b/nested/example.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt/templated_output/macro_in_macro.sq|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt/templated_output/last_day.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt/templated_output/dbt_utils_0.8.0/last_day.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt.*/templated_output/macro_in_macro.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt.*/templated_output/last_day.sql|
|
||||
plugins/sqlfluff-templater-dbt/test/fixtures/dbt.*/templated_output/dbt_utils_0.8.0/last_day.sql|
|
||||
test/fixtures/linter/sqlfluffignore/
|
||||
)$
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 22.3.0
|
||||
rev: 24.2.0
|
||||
hooks:
|
||||
- id: black
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v0.931
|
||||
rev: v1.13.0
|
||||
hooks:
|
||||
- id: mypy
|
||||
args: [--ignore-missing-imports]
|
||||
additional_dependencies:
|
||||
# NOTE: These dependencies should be the same as the `types-*` dependencies in
|
||||
# `requirements_dev.txt`. If you update these, make sure to update those too.
|
||||
[
|
||||
types-toml,
|
||||
types-pkg_resources,
|
||||
types-chardet,
|
||||
types-appdirs,
|
||||
types-colorama,
|
||||
types-pyyaml,
|
||||
types-regex,
|
||||
types-tqdm,
|
||||
# Type stubs are obvious to import, but some dependencies also define their own
|
||||
# types directly (e.g. jinja). pre-commit doesn't actually install the python
|
||||
# package, and so doesn't automatically install the dependencies from
|
||||
# `pyproject.toml` either. We include them here to make sure mypy can function
|
||||
# properly.
|
||||
jinja2,
|
||||
pathspec,
|
||||
pytest, # and by extension... pluggy
|
||||
click,
|
||||
platformdirs
|
||||
]
|
||||
files: ^src/sqlfluff/.*
|
||||
# The mypy pre-commit hook by default sets a few arguments that we don't normally
|
||||
# use. To undo that we reset the `args` to be empty here. This is important to
|
||||
# ensure we don't get conflicting results from the pre-commit hook and from the
|
||||
# CI job.
|
||||
args: []
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 4.0.1
|
||||
rev: 7.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
additional_dependencies: [flake8-black>=0.2.4, flake8-docstrings]
|
||||
additional_dependencies: [flake8-black>=0.3.6]
|
||||
- repo: https://github.com/pycqa/doc8
|
||||
rev: 0.10.1
|
||||
rev: v1.1.1
|
||||
hooks:
|
||||
- id: doc8
|
||||
args: [--file-encoding, utf8]
|
||||
files: docs/source/.*\.rst$
|
||||
- repo: https://github.com/adrienverge/yamllint.git
|
||||
rev: v1.26.3
|
||||
rev: v1.35.1
|
||||
hooks:
|
||||
- id: yamllint
|
||||
args: [-c=.yamllint]
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: "v0.0.243"
|
||||
rev: "v0.3.2"
|
||||
hooks:
|
||||
- id: ruff
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.2.6
|
||||
hooks:
|
||||
- id: codespell
|
||||
exclude: (?x)^(test/fixtures/.*|pyproject.toml)$
|
||||
additional_dependencies: [tomli]
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
- id: sqlfluff-lint
|
||||
name: sqlfluff-lint
|
||||
# Set `--processes 0` to use maximum parallelism
|
||||
entry: sqlfluff lint --processes 0
|
||||
# - `--disable-progress-bar` pre-commit suppresses logging already
|
||||
# this can cause an unneeded slow down.
|
||||
entry: sqlfluff lint --processes 0 --disable-progress-bar
|
||||
language: python
|
||||
description: "Lints sql files with `SQLFluff`"
|
||||
types: [sql]
|
||||
@@ -11,11 +13,12 @@
|
||||
- id: sqlfluff-fix
|
||||
name: sqlfluff-fix
|
||||
# Set a couple of default flags:
|
||||
# - `--force` to disable confirmation
|
||||
# - `--show-lint-violations` shows issues to not require running `sqlfluff lint`
|
||||
# - `--processes 0` to use maximum parallelism
|
||||
# - `--disable-progress-bar` pre-commit suppresses logging already
|
||||
# this can cause an unneeded slow down.
|
||||
# By default, this hook applies all rules.
|
||||
entry: sqlfluff fix --force --show-lint-violations --processes 0
|
||||
entry: sqlfluff fix --show-lint-violations --processes 0 --disable-progress-bar
|
||||
language: python
|
||||
description: "Fixes sql lint errors with `SQLFluff`"
|
||||
types: [sql]
|
||||
|
||||
@@ -16,9 +16,8 @@ formats: []
|
||||
# In our case we need both the docs requirements and the package itself.
|
||||
python:
|
||||
install:
|
||||
- requirements: requirements.txt
|
||||
- requirements: docs/requirements.txt
|
||||
- method: setuptools
|
||||
- method: pip
|
||||
path: .
|
||||
|
||||
build:
|
||||
@@ -26,6 +25,6 @@ build:
|
||||
tools:
|
||||
python: "3.11"
|
||||
jobs:
|
||||
# Before building, generate the rule docs
|
||||
# Before building, generate the rule & dialect docs
|
||||
pre_build:
|
||||
- python docs/generate-rule-docs.py
|
||||
- python docs/generate-auto-docs.py
|
||||
|
||||
1731
CHANGELOG.md
1731
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
@@ -6,7 +6,7 @@
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
identity and expression, level of experience, education, socioeconomic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
|
||||
@@ -25,12 +25,11 @@ for the wiki.
|
||||
|
||||
:star2: **Fifth** - if you are so inclined - pull requests on the core codebase
|
||||
are always welcome. Dialect additions are often a good entry point for new
|
||||
contributors, and we have
|
||||
[a wiki page](https://github.com/sqlfluff/sqlfluff/wiki/Contributing-Dialect-Changes)
|
||||
contributors, and we have [a set of guides](https://docs.sqlfluff.com/en/stable/perma/guides.html)
|
||||
to help you through your first contribution. Bear in mind that all the tests
|
||||
should pass, and test coverage should not decrease unduly as part of the
|
||||
changes which you make. You may find it useful to familiarise yourself with the
|
||||
[architectural principles here](https://docs.sqlfluff.com/en/stable/internals.html#architecture)
|
||||
[architectural principles here](https://docs.sqlfluff.com/en/stable/perma/architecture.html)
|
||||
and with the [current documentation here](https://docs.sqlfluff.com).
|
||||
|
||||
## How The Community Works
|
||||
@@ -92,7 +91,7 @@ python3.8 -m pip install -U tox
|
||||
**IMPORTANT:** `tox` must be installed with a minimum of Python 3.8 as
|
||||
the `mypy` checks are incompatible with 3.7. Those using newer versions of
|
||||
Python may replace `python3.8` as necessary (the test suite runs primarily
|
||||
under 3.11 for example).
|
||||
under 3.12 for example).
|
||||
|
||||
Note: Unfortunately tox does not currently support setting just a minimum
|
||||
Python version (though this may be be coming in tox 4!).
|
||||
@@ -189,6 +188,62 @@ sqlfluff parse test.sql
|
||||
```
|
||||
(ensure your virtual environment is activated first).
|
||||
|
||||
#### How to use and understand the test suite
|
||||
|
||||
When developing for SQLFluff, you may not need (or wish) to run the whole test
|
||||
suite, depending on what you are working on. Here are a couple of scenarios
|
||||
for development, and which parts of the test suite you may find most useful.
|
||||
|
||||
1. For dialect improvements (i.e. changes to anything in [src/sqlfluff/dialects](./src/sqlfluff/dialects))
|
||||
you should not need to continuously run the full core test suite. Running
|
||||
either `tox -e generate-fixture-yml` (if using tox), or setting up a python
|
||||
virtualenv and running `test/generate_parse_fixture_yml.py` directly will
|
||||
usually be sufficient. Both of these options accept arguments to restrict
|
||||
runs to specific dialects to further improve iteration speed. e.g.
|
||||
- `tox -e generate-fixture-yml -- -d mysql` will run just the mysql tests.
|
||||
- `python test/generate_parse_fixture_yml.py -d mysql` will do the same.
|
||||
2. Developing for the dbt templater should only require running the dbt test
|
||||
suite (see below).
|
||||
3. Developing rules and rule plugins there are a couple of scenarios.
|
||||
- When developing a new rule or working with a more isolated rule, you
|
||||
should only need to run the tests for that rule. These are usually what
|
||||
are called the _yaml tests_. This refers to a body of example sql
|
||||
statements and potential fixes defined in a large set of yaml files
|
||||
found in [test/fixtures/rules/std_rule_cases](./test/fixtures/rules/std_rule_cases).
|
||||
The easiest way to run these is by calling that part of the suite
|
||||
directly and filtering to just that rule. For example:
|
||||
- `tox -e py39 -- test/rules/yaml_test_cases_test.py -k AL01`
|
||||
- `pytest test/rules/yaml_test_cases_test.py -k AL01`
|
||||
- When developing on some more complicated rules, or ones known to
|
||||
have interactions with other rules, there are a set of rule fixing
|
||||
tests which apply a set combination of those rules. These are best
|
||||
run via the `autofix` tests. For example:
|
||||
- `tox -e py39 -- test/rules/std_fix_auto_test.py`
|
||||
- `pytest test/rules/std_fix_auto_test.py`
|
||||
- Potentially even the full rules suite `tox -e py39 -- test/rules`
|
||||
- A small number of core rules are also used in making sure that inner
|
||||
parts of SQLFluff are also functioning. This isn't great isolation
|
||||
but does mean that occasionally you may find side effects of your
|
||||
changes in the wider test suite. These can usually be caught by
|
||||
running the full `tox -e py39` suite as a final check (or using the
|
||||
test suite on GitHub when posting your PR).
|
||||
4. When developing the internals of SQLFluff (i.e. anything not
|
||||
already mentioned above), the test suite typically mirrors the structure
|
||||
of the internal submodules of sqlfluff:
|
||||
- When working with the CLI, the `sqlfluff.cli` module has a test suite
|
||||
called via `tox -e py39 -- test/cli`.
|
||||
- When working with the templaters (i.e. `sqlfluff.core.templaters`), the
|
||||
corresponding test suite is found via `tox -e py39 -- test/core/templaters`.
|
||||
- This rough guidance and may however not apply for all of the internals.
|
||||
For example, changes to the internals of the parsing module (`sqlfluff.core.parser`)
|
||||
are very likely to have knock-on implications across the rest of the test
|
||||
suite and it may be necessary to run the whole thing. In these
|
||||
situations however you can usually work slowly outward, for example:
|
||||
1. If your change is to the `AnyOf()` grammar, first running `tox -e py39 -- test/core/parser/grammar_test.py` would be wise.
|
||||
2. ...followed by `tox -e py39 -- test/core/parser` once the above is passing.
|
||||
3. ...and then `tox -e py39 -- test/core`.
|
||||
4. ...and finally the full suite `tox -e py39`.
|
||||
|
||||
#### dbt templater tests
|
||||
|
||||
The dbt templater tests require a locally running Postgres instance. See the
|
||||
@@ -317,7 +372,7 @@ described above.
|
||||
- [ ] Comment in #contributing slack channel about release candidate.
|
||||
- [ ] Update the draft PR as more changes get merged.
|
||||
- [ ] Get another contributor to approve the PR.
|
||||
- [ ] Merge the PR when looks like we've got all we’re gonna get for this release.
|
||||
- [ ] Merge the PR when looks like we've got all we’re going to get for this release.
|
||||
- [ ] Go to the [releases page](https://github.com/sqlfluff/sqlfluff/releases), edit
|
||||
the release to be same as [CHANGELOG.md](CHANGELOG.md) (remember to remove your
|
||||
release PR which doesn’t need to go in this). Add version tag and a title and
|
||||
|
||||
11
Dockerfile
11
Dockerfile
@@ -7,19 +7,20 @@ WORKDIR /app
|
||||
ENV VIRTUAL_ENV /app/.venv
|
||||
RUN python -m venv $VIRTUAL_ENV
|
||||
ENV PATH $VIRTUAL_ENV/bin:$PATH
|
||||
RUN pip install --no-cache-dir --upgrade pip setuptools wheel
|
||||
RUN pip install --no-cache-dir --upgrade pip setuptools wheel pip-tools
|
||||
|
||||
# Install requirements separately
|
||||
# to take advantage of layer caching.
|
||||
# N.B. we extract the requirements from setup.cfg
|
||||
COPY setup.cfg .
|
||||
RUN python -c "import configparser; c = configparser.ConfigParser(); c.read('setup.cfg'); print(c['options']['install_requires'])" > requirements.txt
|
||||
# N.B. we extract the requirements from pyproject.toml
|
||||
COPY pyproject.toml .
|
||||
# Use piptools to extract requirements from pyproject.toml as described in
|
||||
# https://github.com/pypa/pip/issues/11584
|
||||
RUN pip-compile -o requirements.txt pyproject.toml -v --strip-extras
|
||||
RUN pip install --no-cache-dir --upgrade -r requirements.txt
|
||||
|
||||
# Copy minimal set of SQLFluff package files.
|
||||
COPY MANIFEST.in .
|
||||
COPY README.md .
|
||||
COPY setup.py .
|
||||
COPY src ./src
|
||||
|
||||
# Install sqlfluff package.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2018 Alan Cruickshank
|
||||
Copyright (c) 2023 Alan Cruickshank
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
37
README.md
37
README.md
@@ -8,18 +8,30 @@
|
||||
[](https://pypi.org/project/sqlfluff/)
|
||||
[](https://pypi.org/project/sqlfluff/)
|
||||
|
||||
[](https://codecov.io/gh/sqlfluff/sqlfluff)
|
||||
[](https://coveralls.io/github/sqlfluff/sqlfluff?branch=main)
|
||||
[](https://github.com/sqlfluff/sqlfluff/actions/workflows/ci-tests.yml?query=branch%3Amain)
|
||||
[](https://sqlfluff.readthedocs.io)
|
||||
[](https://github.com/psf/black)
|
||||
[](https://hub.docker.com/r/sqlfluff/sqlfluff)
|
||||
[](https://gurubase.io/g/sqlfluff)
|
||||
|
||||
**SQLFluff** is a dialect-flexible and configurable SQL linter. Designed
|
||||
with ELT applications in mind, **SQLFluff** also works with Jinja templating
|
||||
and dbt. **SQLFluff** will auto-fix most linting errors, allowing you to focus
|
||||
your time on what matters.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [Dialects Supported](#dialects-supported)
|
||||
2. [Templates Supported](#templates-supported)
|
||||
3. [VS Code Extension](#vs-code-extension)
|
||||
4. [Getting Started](#getting-started)
|
||||
5. [Documentation](#documentation)
|
||||
6. [Releases](#releases)
|
||||
7. [SQLFluff on Slack](#sqlfluff-on-slack)
|
||||
8. [SQLFluff on Twitter](#sqlfluff-on-twitter)
|
||||
9. [Contributing](#contributing)
|
||||
10. [Sponsors](#sponsors)
|
||||
|
||||
## Dialects Supported
|
||||
|
||||
Although SQL is reasonably consistent in its implementations, there are several
|
||||
@@ -38,6 +50,7 @@ currently supports the following SQL dialects (though perhaps not in full):
|
||||
- [Exasol](https://www.exasol.com/)
|
||||
- [Greenplum](https://greenplum.org/)
|
||||
- [Hive](https://hive.apache.org/)
|
||||
- [Impala](https://impala.apache.org/)
|
||||
- [Materialize](https://materialize.com/)
|
||||
- [MySQL](https://www.mysql.com/)
|
||||
- [Oracle](https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/index.html)
|
||||
@@ -47,8 +60,11 @@ currently supports the following SQL dialects (though perhaps not in full):
|
||||
- [SOQL](https://developer.salesforce.com/docs/atlas.en-us.soql_sosl.meta/soql_sosl/sforce_api_calls_soql.htm)
|
||||
- [SparkSQL](https://spark.apache.org/docs/latest/)
|
||||
- [SQLite](https://www.sqlite.org/)
|
||||
- [StarRocks](https://www.starrocks.io)
|
||||
- [Teradata](https://www.teradata.com/)
|
||||
- [Transact-SQL](https://docs.microsoft.com/en-us/sql/t-sql/language-reference) (aka T-SQL)
|
||||
- [Trino](https://trino.io/)
|
||||
- [Vertica](https://www.vertica.com/)
|
||||
|
||||
We aim to make it easy to expand on the support of these dialects and also
|
||||
add other, currently unsupported, dialects. Please [raise issues](https://github.com/sqlfluff/sqlfluff/issues)
|
||||
@@ -64,11 +80,14 @@ fit for this project before spending time on this work.
|
||||
|
||||
SQL itself does not lend itself well to [modularity](https://docs.getdbt.com/docs/viewpoint#section-modularity),
|
||||
so to introduce some flexibility and reusability it is often [templated](https://en.wikipedia.org/wiki/Template_processor)
|
||||
as discussed more in [our modularity documentation](https://docs.sqlfluff.com/en/stable/realworld.html#modularity).
|
||||
as discussed more in [our modularity documentation](https://docs.sqlfluff.com/en/stable/perma/modularity.html).
|
||||
|
||||
**SQLFluff** supports the following templates:
|
||||
|
||||
- [Jinja](https://jinja.palletsprojects.com/) (aka Jinja2)
|
||||
- [dbt](https://www.getdbt.com/)
|
||||
- SQL placeholders (e.g. SQLAlchemy parameters)
|
||||
- [Python format strings](https://docs.python.org/3/library/string.html#format-string-syntax)
|
||||
- [dbt](https://www.getdbt.com/) (requires plugin)
|
||||
|
||||
Again, please raise issues if you wish to support more templating languages/syntaxes.
|
||||
|
||||
@@ -108,8 +127,8 @@ All Finished 📜 🎉!
|
||||
Alternatively, you can use the [**Official SQLFluff Docker Image**](https://hub.docker.com/r/sqlfluff/sqlfluff)
|
||||
or have a play using [**SQLFluff online**](https://online.sqlfluff.com/).
|
||||
|
||||
For full [CLI usage](https://docs.sqlfluff.com/en/stable/cli.html) and
|
||||
[rules reference](https://docs.sqlfluff.com/en/stable/rules.html), see
|
||||
For full [CLI usage](https://docs.sqlfluff.com/en/stable/perma/cli.html) and
|
||||
[rules reference](https://docs.sqlfluff.com/en/stable/perma/rules.html), see
|
||||
[the SQLFluff docs](https://docs.sqlfluff.com/en/stable/).
|
||||
|
||||
# Documentation
|
||||
@@ -126,7 +145,7 @@ so breaking changes should be restricted to major versions releases. Some
|
||||
elements (such as the python API) are in a less stable state and may see more
|
||||
significant changes more often. For details on breaking changes and how
|
||||
to migrate between versions, see our
|
||||
[release notes](https://docs.sqlfluff.com/en/latest/releasenotes.html). See the
|
||||
[release notes](https://docs.sqlfluff.com/en/latest/perma/releasenotes.html). See the
|
||||
[changelog](CHANGELOG.md) for more details. If you would like to join in, please
|
||||
consider [contributing](CONTRIBUTING.md).
|
||||
|
||||
@@ -136,7 +155,7 @@ New releases are made monthly. For more information, visit
|
||||
# SQLFluff on Slack
|
||||
|
||||
We have a fast-growing community
|
||||
[on Slack](https://join.slack.com/t/sqlfluff/shared_invite/zt-o1f4x0e8-pZzarAIlQmKj_6ZwD16w0g),
|
||||
[on Slack](https://join.slack.com/t/sqlfluff/shared_invite/zt-2qtu36kdt-OS4iONPbQ3aCz2DIbYJdWg),
|
||||
come and join us!
|
||||
|
||||
# SQLFluff on Twitter
|
||||
@@ -150,7 +169,7 @@ We are grateful to all our [contributors](https://github.com/sqlfluff/sqlfluff/g
|
||||
There is a lot to do in this project, and we are just getting started.
|
||||
|
||||
If you want to understand more about the architecture of **SQLFluff**, you can
|
||||
find [more here](https://docs.sqlfluff.com/en/latest/internals.html#architecture).
|
||||
find [more here](https://docs.sqlfluff.com/en/latest/perma/architecture.html).
|
||||
|
||||
If you would like to contribute, check out the
|
||||
[open issues on GitHub](https://github.com/sqlfluff/sqlfluff/issues). You can also see
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
-- Package model
|
||||
|
||||
{{
|
||||
config(
|
||||
materialized = "table",
|
||||
)
|
||||
}}
|
||||
|
||||
with
|
||||
wer as (
|
||||
SELECT
|
||||
sp.sergvsdrvs,
|
||||
sp.sdtrsbnt,
|
||||
trim(LTRIM(sp.sresgdr, 'asecesf')) as srebstrgserg,
|
||||
sp.bdfgsrg,
|
||||
sp.sfnsfdgnfd,
|
||||
sp.vsdfbvsdfv,
|
||||
sp.sdbsdr,
|
||||
sp.srgdsrbsfcgb,
|
||||
s.sdrgsdrbsd,
|
||||
s.sdrgngf,
|
||||
s.cvyjhcth,
|
||||
tspc.fgyjmgbhmv,
|
||||
tspc.cgh,
|
||||
tspc.ghftdnftbcfhcgnc,
|
||||
tspc.ynvgnmbjmvb,
|
||||
s.vgbhm,
|
||||
fhdtdtnyjftgjyumh,
|
||||
sp.fgufghjfghjf
|
||||
FROM {{ ref('wetrghdxfh') }} as sp
|
||||
inner join {{ ref('strghtdfh') }} s using(sergvsdrvs)
|
||||
left join {{ ref('sdrtghsdtrh') }} as tspc
|
||||
on (sp.sdtrsbnt = tspc.sdtrsbnt)
|
||||
),
|
||||
qwe as (
|
||||
select
|
||||
servsdrfvsdzrv,
|
||||
min(dsfgsergsrdg) as sftgbnsbgvszd
|
||||
from {{ ref('sdfgsre') }}
|
||||
group by servsdrfvsdzrv
|
||||
),
|
||||
ert as (
|
||||
SELECT
|
||||
p.sdfsdgsdfg,
|
||||
MAX(IFF(ce.ts is not null, 1, 0)) = 1 as has_events,
|
||||
min(ce.ts) as first_event,
|
||||
max(ce.ts) as last_event
|
||||
FROM sdfsgdfg p
|
||||
LEFT JOIN {{ ref('dsrgsdrg') }} ce
|
||||
on (p.dfgsd = trim(ce.lpn)
|
||||
and ce.ts > p.sdfg - interval '30 days'
|
||||
and ce.ts < p.sdfg + interval '60 days'
|
||||
and ce.ts < CURRENT_DATE + interval '78 hours')
|
||||
GROUP BY p.sdfsdgsdfg
|
||||
),
|
||||
dsfg as (
|
||||
SELECT
|
||||
p.rfgsrdsrd,
|
||||
MAX(IFF(t.success = 0, 1, 0)) = 1 as sergsdrg
|
||||
FROM wer p
|
||||
LEFT JOIN {{ ref('ncvbncbvnvcn') }} t
|
||||
ON (p.dfg = t.dfg AND t.ertwretwetr = 'purchase')
|
||||
GROUP BY p.rfgsrdsrd
|
||||
)
|
||||
select
|
||||
p.sdfgsdg,
|
||||
p.wertwert,
|
||||
p.nfghncvn,
|
||||
p.fgsgdfg,
|
||||
p.dfgsncn,
|
||||
p.sdfhgdg,
|
||||
p.ghdstrh,
|
||||
p.dgnsfnstrh,
|
||||
p.srthsdhfgh,
|
||||
p.fgdfhgdfgn,
|
||||
p.dfgnhdndtf,
|
||||
p.dfthstghsreg,
|
||||
qwe.sdfbsrb,
|
||||
qwe.sdfbsfdb,
|
||||
qwe.dfdfgdr,
|
||||
billing_events.ahreagre,
|
||||
p.fsdgseag,
|
||||
p.fb,
|
||||
p.fsgfdg,
|
||||
od.xcbrdbrbsdrbsg,
|
||||
p.sdfgsrbsrebs,
|
||||
p.sdfgsdfbsdrg,
|
||||
p.sdgsdrgrgrdgs
|
||||
from packages p
|
||||
inner join qwe using(sdrgsrdg)
|
||||
inner join ert using(sdfasef)
|
||||
INNER JOIN dsfg od ON p.shipment_id = od.shipment_id
|
||||
@@ -1,34 +0,0 @@
|
||||
[sqlfluff]
|
||||
verbose = 0
|
||||
nocolor = False
|
||||
dialect = bigquery
|
||||
templater = jinja
|
||||
rules = None
|
||||
exclude_rules = None
|
||||
recurse = 0
|
||||
|
||||
[sqlfluff:templater:jinja:macros]
|
||||
|
||||
# Some rules can be configured directly from the config common to other rules.
|
||||
[sqlfluff:rules]
|
||||
tab_space_size = 4
|
||||
|
||||
# Some rules have their own specific config.
|
||||
[sqlfluff:rules:L010]
|
||||
capitalisation_policy = consistent
|
||||
|
||||
[sqlfluff:templater:jinja:context]
|
||||
dataset=dataset
|
||||
gcp_project=gcp_project
|
||||
benchmark_user_map_project=project
|
||||
benchmark_user_map_dataset=summary
|
||||
benchmark_user_map_table=benchmark_user_map
|
||||
benchmark_summaries_project=project
|
||||
benchmark_summaries_dataset=summary
|
||||
benchmark_summaries_table=benchmark_summaries
|
||||
campaign_performance_project=project
|
||||
campaign_performance_dataset=summary
|
||||
campaign_performance_table=campaign_performance
|
||||
user_average_project=project
|
||||
user_average_dataset=summary
|
||||
user_average_table=average_user_performance
|
||||
@@ -1,246 +0,0 @@
|
||||
-- This query generated by script/generate_corr_queries.py and should probably not be
|
||||
-- modified manually. Instead, make changes to that script and rerun it.
|
||||
|
||||
WITH
|
||||
raw_effect_sizes AS (
|
||||
SELECT
|
||||
COUNT(1) AS campaign_count,
|
||||
state_user_v_peer_open
|
||||
,business_type
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_small_subject_line to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_small_subject_line), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_small_subject_line)) AS open_uses_small_subject_line
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_personal_subject to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_personal_subject), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_personal_subject)) AS open_uses_personal_subject
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_timewarp to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_timewarp), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_timewarp)) AS open_uses_timewarp
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_small_preview to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_small_preview), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_small_preview)) AS open_uses_small_preview
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_personal_to to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_personal_to), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_personal_to)) AS open_uses_personal_to
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_ab_test_subject to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_ab_test_subject), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_ab_test_subject)) AS open_uses_ab_test_subject
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_ab_test_content to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_ab_test_content), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_ab_test_content)) AS open_uses_ab_test_content
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_preview_text to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_preview_text), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_preview_text)) AS open_uses_preview_text
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_sto to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_sto), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_sto)) AS open_uses_sto
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_freemail_from to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_freemail_from), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_freemail_from)) AS open_uses_freemail_from
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_resend_non_openers to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_resend_non_openers), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_resend_non_openers)) AS open_uses_resend_non_openers
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_promo_code to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_promo_code), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_promo_code)) AS open_uses_promo_code
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_prex to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_prex), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_prex)) AS open_uses_prex
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_ab_test_from to be the "x variable" in terms of the regression line.
|
||||
,SAFE_DIVIDE(SAFE_MULTIPLY(CORR(open_rate_su, uses_ab_test_from), STDDEV_POP(open_rate_su)), STDDEV_POP(uses_ab_test_from)) AS open_uses_ab_test_from
|
||||
|
||||
FROM
|
||||
`{{gcp_project}}.{{dataset}}.global_actions_states`
|
||||
GROUP BY
|
||||
state_user_v_peer_open
|
||||
,business_type),
|
||||
|
||||
imputed_effect_sizes AS (
|
||||
SELECT
|
||||
campaign_count,
|
||||
state_user_v_peer_open
|
||||
,business_type
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_small_subject_line), 0, open_uses_small_subject_line), 0) AS open_uses_small_subject_line
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_personal_subject), 0, open_uses_personal_subject), 0) AS open_uses_personal_subject
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_timewarp), 0, open_uses_timewarp), 0) AS open_uses_timewarp
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_small_preview), 0, open_uses_small_preview), 0) AS open_uses_small_preview
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_personal_to), 0, open_uses_personal_to), 0) AS open_uses_personal_to
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_ab_test_subject), 0, open_uses_ab_test_subject), 0) AS open_uses_ab_test_subject
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_ab_test_content), 0, open_uses_ab_test_content), 0) AS open_uses_ab_test_content
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_preview_text), 0, open_uses_preview_text), 0) AS open_uses_preview_text
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_sto), 0, open_uses_sto), 0) AS open_uses_sto
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_freemail_from), 0, open_uses_freemail_from), 0) AS open_uses_freemail_from
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_resend_non_openers), 0, open_uses_resend_non_openers), 0) AS open_uses_resend_non_openers
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_promo_code), 0, open_uses_promo_code), 0) AS open_uses_promo_code
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_prex), 0, open_uses_prex), 0) AS open_uses_prex
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
,COALESCE(IF(IS_NAN(open_uses_ab_test_from), 0, open_uses_ab_test_from), 0) AS open_uses_ab_test_from
|
||||
|
||||
FROM
|
||||
raw_effect_sizes
|
||||
),
|
||||
|
||||
action_states AS (
|
||||
SELECT
|
||||
has_used_small_subject_line
|
||||
,has_used_personal_subject
|
||||
,has_used_timewarp
|
||||
,has_used_small_preview
|
||||
,has_used_personal_to
|
||||
,has_used_ab_test_subject
|
||||
,has_used_ab_test_content
|
||||
,has_used_preview_text
|
||||
,has_used_sto
|
||||
,has_used_freemail_from
|
||||
,has_used_resend_non_openers
|
||||
,has_used_promo_code
|
||||
,has_used_prex
|
||||
,has_used_ab_test_from
|
||||
FROM `{{gcp_project}}.{{dataset}}.global_state_space`
|
||||
GROUP BY has_used_small_subject_line
|
||||
,has_used_personal_subject
|
||||
,has_used_timewarp
|
||||
,has_used_small_preview
|
||||
,has_used_personal_to
|
||||
,has_used_ab_test_subject
|
||||
,has_used_ab_test_content
|
||||
,has_used_preview_text
|
||||
,has_used_sto
|
||||
,has_used_freemail_from
|
||||
,has_used_resend_non_openers
|
||||
,has_used_promo_code
|
||||
,has_used_prex
|
||||
,has_used_ab_test_from)
|
||||
|
||||
SELECT
|
||||
imputed_effect_sizes.*,
|
||||
has_used_small_subject_line
|
||||
,has_used_personal_subject
|
||||
,has_used_timewarp
|
||||
,has_used_small_preview
|
||||
,has_used_personal_to
|
||||
,has_used_ab_test_subject
|
||||
,has_used_ab_test_content
|
||||
,has_used_preview_text
|
||||
,has_used_sto
|
||||
,has_used_freemail_from
|
||||
,has_used_resend_non_openers
|
||||
,has_used_promo_code
|
||||
,has_used_prex
|
||||
,has_used_ab_test_from
|
||||
FROM
|
||||
imputed_effect_sizes
|
||||
CROSS JOIN action_states
|
||||
ORDER BY campaign_count DESC
|
||||
@@ -1,366 +0,0 @@
|
||||
-- This query generated by script/generate_corr_queries.py and should probably not be
|
||||
-- modified manually. Instead, make changes to that script and rerun it.
|
||||
|
||||
WITH
|
||||
raw_effect_sizes AS (
|
||||
SELECT
|
||||
state_user_v_peer_open,
|
||||
business_type,
|
||||
COUNT(*) AS campaign_count,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_small_subject_line to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_small_subject_line),
|
||||
STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_small_subject_line)
|
||||
) AS open_uses_small_subject_line,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_personal_subject to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_personal_subject),
|
||||
STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_personal_subject)
|
||||
) AS open_uses_personal_subject,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_timewarp to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_timewarp), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_timewarp)
|
||||
) AS open_uses_timewarp,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_small_preview to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_small_preview), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_small_preview)
|
||||
) AS open_uses_small_preview,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_personal_to to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_personal_to), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_personal_to)
|
||||
) AS open_uses_personal_to,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_ab_test_subject to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_ab_test_subject),
|
||||
STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_ab_test_subject)
|
||||
) AS open_uses_ab_test_subject,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_ab_test_content to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_ab_test_content),
|
||||
STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_ab_test_content)
|
||||
) AS open_uses_ab_test_content,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_preview_text to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_preview_text), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_preview_text)
|
||||
) AS open_uses_preview_text,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_sto to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_sto), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_sto)
|
||||
) AS open_uses_sto,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_freemail_from to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_freemail_from), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_freemail_from)
|
||||
) AS open_uses_freemail_from,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_resend_non_openers to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_resend_non_openers),
|
||||
STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_resend_non_openers)
|
||||
) AS open_uses_resend_non_openers,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_promo_code to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_promo_code), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_promo_code)
|
||||
) AS open_uses_promo_code,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_prex to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_prex), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_prex)
|
||||
) AS open_uses_prex,
|
||||
|
||||
-- The following is the slope of the regression line. Note that CORR (which is the Pearson's correlation
|
||||
-- coefficient is symmetric in its arguments, but since STDDEV_POP(open_rate_su) appears in the
|
||||
-- numerator this is the slope of the regression line considering STDDEV_POP(open_rate_su) to be
|
||||
-- the "y variable" and uses_ab_test_from to be the "x variable" in terms of the regression line.
|
||||
SAFE_DIVIDE(
|
||||
SAFE_MULTIPLY(
|
||||
CORR(open_rate_su, uses_ab_test_from), STDDEV_POP(open_rate_su)
|
||||
),
|
||||
STDDEV_POP(uses_ab_test_from)
|
||||
) AS open_uses_ab_test_from
|
||||
|
||||
FROM
|
||||
`{{gcp_project}}.{{dataset}}.global_actions_states`
|
||||
GROUP BY
|
||||
state_user_v_peer_open,
|
||||
business_type),
|
||||
|
||||
imputed_effect_sizes AS (
|
||||
SELECT
|
||||
campaign_count,
|
||||
state_user_v_peer_open,
|
||||
business_type,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(
|
||||
IS_NAN(open_uses_small_subject_line),
|
||||
0,
|
||||
open_uses_small_subject_line
|
||||
),
|
||||
0
|
||||
) AS open_uses_small_subject_line,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(
|
||||
IS_NAN(open_uses_personal_subject),
|
||||
0,
|
||||
open_uses_personal_subject
|
||||
),
|
||||
0
|
||||
) AS open_uses_personal_subject,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_timewarp), 0, open_uses_timewarp), 0
|
||||
) AS open_uses_timewarp,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_small_preview), 0, open_uses_small_preview), 0
|
||||
) AS open_uses_small_preview,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_personal_to), 0, open_uses_personal_to), 0
|
||||
) AS open_uses_personal_to,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_ab_test_subject), 0, open_uses_ab_test_subject),
|
||||
0
|
||||
) AS open_uses_ab_test_subject,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_ab_test_content), 0, open_uses_ab_test_content),
|
||||
0
|
||||
) AS open_uses_ab_test_content,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_preview_text), 0, open_uses_preview_text), 0
|
||||
) AS open_uses_preview_text,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_sto), 0, open_uses_sto), 0
|
||||
) AS open_uses_sto,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_freemail_from), 0, open_uses_freemail_from), 0
|
||||
) AS open_uses_freemail_from,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(
|
||||
IS_NAN(open_uses_resend_non_openers),
|
||||
0,
|
||||
open_uses_resend_non_openers
|
||||
),
|
||||
0
|
||||
) AS open_uses_resend_non_openers,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_promo_code), 0, open_uses_promo_code), 0
|
||||
) AS open_uses_promo_code,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_prex), 0, open_uses_prex), 0
|
||||
) AS open_uses_prex,
|
||||
|
||||
-- We now impute the value of the effect size to 0 if it was NaN or NULL. This is to
|
||||
-- take into account states where all campaigns either did or did not perform an
|
||||
-- action. In these cases, we assume that campaign outcome is uncorrelated with
|
||||
-- the action because we do not have evidence otherwise.
|
||||
COALESCE(
|
||||
IF(IS_NAN(open_uses_ab_test_from), 0, open_uses_ab_test_from), 0
|
||||
) AS open_uses_ab_test_from
|
||||
|
||||
FROM
|
||||
raw_effect_sizes
|
||||
),
|
||||
|
||||
action_states AS (
|
||||
SELECT
|
||||
has_used_small_subject_line,
|
||||
has_used_personal_subject,
|
||||
has_used_timewarp,
|
||||
has_used_small_preview,
|
||||
has_used_personal_to,
|
||||
has_used_ab_test_subject,
|
||||
has_used_ab_test_content,
|
||||
has_used_preview_text,
|
||||
has_used_sto,
|
||||
has_used_freemail_from,
|
||||
has_used_resend_non_openers,
|
||||
has_used_promo_code,
|
||||
has_used_prex,
|
||||
has_used_ab_test_from
|
||||
FROM `{{gcp_project}}.{{dataset}}.global_state_space`
|
||||
GROUP BY has_used_small_subject_line,
|
||||
has_used_personal_subject,
|
||||
has_used_timewarp,
|
||||
has_used_small_preview,
|
||||
has_used_personal_to,
|
||||
has_used_ab_test_subject,
|
||||
has_used_ab_test_content,
|
||||
has_used_preview_text,
|
||||
has_used_sto,
|
||||
has_used_freemail_from,
|
||||
has_used_resend_non_openers,
|
||||
has_used_promo_code,
|
||||
has_used_prex,
|
||||
has_used_ab_test_from)
|
||||
|
||||
SELECT
|
||||
imputed_effect_sizes.*,
|
||||
has_used_small_subject_line,
|
||||
has_used_personal_subject,
|
||||
has_used_timewarp,
|
||||
has_used_small_preview,
|
||||
has_used_personal_to,
|
||||
has_used_ab_test_subject,
|
||||
has_used_ab_test_content,
|
||||
has_used_preview_text,
|
||||
has_used_sto,
|
||||
has_used_freemail_from,
|
||||
has_used_resend_non_openers,
|
||||
has_used_promo_code,
|
||||
has_used_prex,
|
||||
has_used_ab_test_from
|
||||
FROM
|
||||
imputed_effect_sizes
|
||||
CROSS JOIN action_states
|
||||
ORDER BY campaign_count DESC
|
||||
@@ -1,8 +0,0 @@
|
||||
benchmarks:
|
||||
- name: 004_L003_indentation_3
|
||||
cmd: ['sqlfluff', 'parse', '--dialect=ansi', '--bench', 'test/fixtures/linter/autofix/ansi/004_indentation/before.sql']
|
||||
- name: B_001_package
|
||||
cmd: ['sqlfluff', 'parse', '--dialect=ansi', '--bench', 'benchmarks/bench_001_package.sql']
|
||||
- name: B_002_pearson
|
||||
cmd: ['sqlfluff', 'fix', '--dialect=ansi', '-f', '--bench',
|
||||
'--fixed-suffix', '_fix', 'benchmarks/bench_002/bench_002_pearson.sql']
|
||||
@@ -1,2 +0,0 @@
|
||||
dbt-core~=1.1.0
|
||||
dbt-postgres~=1.1.0
|
||||
@@ -1,2 +0,0 @@
|
||||
dbt-core~=1.2.0
|
||||
dbt-postgres~=1.2.0
|
||||
@@ -1,2 +0,0 @@
|
||||
dbt-core~=1.3.0
|
||||
dbt-postgres~=1.3.0
|
||||
@@ -1,2 +1,2 @@
|
||||
dbt-core~=1.4.0
|
||||
dbt-postgres~=1.4.0
|
||||
dbt-core~=1.4.1
|
||||
dbt-postgres~=1.4.1
|
||||
|
||||
@@ -1,3 +1,2 @@
|
||||
dbt-core~=1.5.0
|
||||
dbt-postgres~=1.5.0
|
||||
markupsafe<=2.0.1
|
||||
|
||||
2
constraints/dbt160.txt
Normal file
2
constraints/dbt160.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
dbt-core~=1.6.0
|
||||
dbt-postgres~=1.6.0
|
||||
2
constraints/dbt170.txt
Normal file
2
constraints/dbt170.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
dbt-core~=1.7.0
|
||||
dbt-postgres~=1.7.0
|
||||
2
constraints/dbt180-winpy.txt
Normal file
2
constraints/dbt180-winpy.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
dbt-core~=1.8.0
|
||||
dbt-postgres~=1.8.0
|
||||
2
constraints/dbt180.txt
Normal file
2
constraints/dbt180.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
dbt-core~=1.8.0
|
||||
dbt-postgres~=1.8.0
|
||||
2
constraints/dbt190-winpy.txt
Normal file
2
constraints/dbt190-winpy.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
dbt-core~=1.9.0b1
|
||||
dbt-postgres~=1.9.0b1
|
||||
2
constraints/dbt190.txt
Normal file
2
constraints/dbt190.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
dbt-core~=1.9.0b1
|
||||
dbt-postgres~=1.9.0b1
|
||||
@@ -20,5 +20,5 @@ help:
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
python generate-rule-docs.py
|
||||
python generate-auto-docs.py
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
@@ -1,8 +1,24 @@
|
||||
"""Generate rule documentation automatically."""
|
||||
"""Generate some documentation automatically.
|
||||
|
||||
This script generates partial documentation sections (i.e. the content of
|
||||
`/docs/source/_partials/`) by importing SQLFluff and extracting data about
|
||||
rules and dialects.
|
||||
|
||||
It should run before every docs generation so that those partial .rst files
|
||||
can then be correctly referenced by other sections of the docs. For example
|
||||
this file builds the file `/docs/source/_partials/rule_summaries.rst`, which
|
||||
is then inserted into `/docs/source/reference/rules.rst` using the directive
|
||||
`.. include:: ../_partials/rule_summaries.rst`.
|
||||
|
||||
This script is referenced in the `Makefile` and the `make.bat` file to ensure
|
||||
it is run at the appropriate moment.
|
||||
"""
|
||||
|
||||
import json
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
|
||||
import sqlfluff
|
||||
from sqlfluff.core.plugin.host import get_plugin_manager
|
||||
|
||||
base_path = Path(__file__).parent.absolute()
|
||||
@@ -27,14 +43,21 @@ table_header = f"""
|
||||
# Extract all the rules.
|
||||
print("Rule Docs Generation: Reading Rules...")
|
||||
rule_bundles = defaultdict(list)
|
||||
rule_list = []
|
||||
for plugin_rules in get_plugin_manager().hook.get_rules():
|
||||
for rule in plugin_rules:
|
||||
_bundle_name = rule.name.split(".")[0]
|
||||
rule_bundles[_bundle_name].append(rule)
|
||||
rule_list.append((rule.code, rule.name))
|
||||
|
||||
# Write them into a json file for use by redirects.
|
||||
print("Rule Docs Generation: Writing Rule JSON...")
|
||||
with open(base_path / "source/_partials/rule_list.json", "w", encoding="utf8") as f:
|
||||
json.dump(rule_list, f)
|
||||
|
||||
# Write them into the table. Bundle by bundle.
|
||||
print("Rule Docs Generation: Writing Rule Table...")
|
||||
with open(base_path / "source/partials/rule_table.rst", "w", encoding="utf8") as f:
|
||||
with open(base_path / "source/_partials/rule_table.rst", "w", encoding="utf8") as f:
|
||||
f.write(autogen_header)
|
||||
f.write(table_header)
|
||||
for bundle in sorted(rule_bundles.keys()):
|
||||
@@ -74,7 +97,7 @@ with open(base_path / "source/partials/rule_table.rst", "w", encoding="utf8") as
|
||||
|
||||
# Write each of the summary files.
|
||||
print("Rule Docs Generation: Writing Rule Summaries...")
|
||||
with open(base_path / "source/partials/rule_summaries.rst", "w", encoding="utf8") as f:
|
||||
with open(base_path / "source/_partials/rule_summaries.rst", "w", encoding="utf8") as f:
|
||||
f.write(autogen_header)
|
||||
for bundle in sorted(rule_bundles.keys()):
|
||||
if "sql" in bundle:
|
||||
@@ -102,3 +125,31 @@ with open(base_path / "source/partials/rule_summaries.rst", "w", encoding="utf8"
|
||||
f.write("\n\n")
|
||||
|
||||
print("Rule Docs Generation: Done")
|
||||
|
||||
# Extract all the dialects.
|
||||
print("Dialect Docs Generation: Reading Dialects...")
|
||||
# We make a dictionary of all of them first, because we want to force the ANSI
|
||||
# one to be first.
|
||||
dialect_dict = {dialect.label: dialect for dialect in sqlfluff.list_dialects()}
|
||||
dialect_list = [dialect_dict["ansi"]] + [
|
||||
dialect for dialect_name, dialect in dialect_dict.items() if dialect_name != "ansi"
|
||||
]
|
||||
|
||||
# Write each of the summary files.
|
||||
print("Dialect Docs Generation: Writing Dialect Summaries...")
|
||||
with open(
|
||||
base_path / "source/_partials/dialect_summaries.rst", "w", encoding="utf8"
|
||||
) as f:
|
||||
f.write(autogen_header)
|
||||
for dialect in dialect_list:
|
||||
f.write(
|
||||
f".. _{dialect.label}_dialect_ref:\n\n"
|
||||
f"{dialect.name}\n{'-' * len(dialect.name)}\n\n"
|
||||
f"**Label**: ``{dialect.label}``\n\n"
|
||||
)
|
||||
if dialect.label != "ansi":
|
||||
f.write(
|
||||
f"**Inherits from**: :ref:`{dialect.inherits_from}_dialect_ref`\n\n"
|
||||
)
|
||||
if dialect.docstring:
|
||||
f.write(dialect.docstring + "\n\n")
|
||||
@@ -25,8 +25,8 @@ if errorlevel 9009 (
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
REM Generate the rule docs
|
||||
py generate-rule-docs.py
|
||||
REM Generate the rule & dialect docs
|
||||
python generate-auto-docs.py
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
"""The sqlfluff domain for documenting rules."""
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.domains import Domain, ObjType
|
||||
from sphinx.directives import ObjectDescription
|
||||
from sphinx.domains import Domain, ObjType
|
||||
from sphinx.roles import XRefRole
|
||||
from sphinx.util.nodes import make_refnode
|
||||
|
||||
@@ -15,10 +15,18 @@ class SQLFluffRule(ObjectDescription):
|
||||
.. code-block:: rst
|
||||
|
||||
.. sqlfluff:rule:: AM01
|
||||
ambiguous.distinct
|
||||
ambiguous.distinct
|
||||
|
||||
Write the documentation for the rule here.
|
||||
|
||||
To cross reference (i.e. refer to) objects defined like this
|
||||
both the code and name reference is available:
|
||||
|
||||
.. code-block:: rst
|
||||
|
||||
:sqlfluff:ref:`CP02`
|
||||
:sqlfluff:ref:`capitalisation.identifiers`
|
||||
|
||||
"""
|
||||
|
||||
def handle_signature(self, sig, signode):
|
||||
|
||||
4
docs/source/_partials/.gitignore
vendored
Normal file
4
docs/source/_partials/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
rule_table.rst
|
||||
rule_summaries.rst
|
||||
rule_list.json
|
||||
dialect_summaries.rst
|
||||
@@ -6,4 +6,4 @@ Some of those files are also auto-generated by scripts,
|
||||
in which case they should be included in the `.gitignore`
|
||||
and not edited by hand.
|
||||
|
||||
See docs/generate-rule-docs.py for more info.
|
||||
See [generate-auto-docs.py](https://github.com/sqlfluff/sqlfluff/blob/main/docs/generate-auto-docs.py) for more info.
|
||||
@@ -1,6 +1,6 @@
|
||||
[sqlfluff]
|
||||
|
||||
# Supported dialects https://docs.sqlfluff.com/en/stable/dialects.html
|
||||
# Supported dialects https://docs.sqlfluff.com/en/stable/perma/dialects.html
|
||||
# Or run 'sqlfluff dialects'
|
||||
dialect = snowflake
|
||||
|
||||
@@ -8,7 +8,7 @@ dialect = snowflake
|
||||
templater = jinja
|
||||
|
||||
# Comma separated list of rules to exclude, or None
|
||||
# See https://docs.sqlfluff.com/en/stable/configuration.html#enabling-and-disabling-rules
|
||||
# See https://docs.sqlfluff.com/en/stable/perma/rule_disabling.html
|
||||
# AM04 (ambiguous.column_count) and ST06 (structure.column_order) are
|
||||
# two of the more controversial rules included to illustrate usage.
|
||||
exclude_rules = ambiguous.column_count, structure.column_order
|
||||
@@ -34,16 +34,8 @@ project_dir = ./
|
||||
[sqlfluff:indentation]
|
||||
# While implicit indents are not enabled by default. Many of the
|
||||
# SQLFluff maintainers do use them in their projects.
|
||||
allow_implicit_indents = true
|
||||
allow_implicit_indents = True
|
||||
|
||||
# The default configuration for aliasing rules is "consistent"
|
||||
# which will auto-detect the setting from the rest of the file. This
|
||||
# is less desirable in a new project and you may find this (slightly
|
||||
# more strict) setting more useful.
|
||||
[sqlfluff:rules:aliasing.table]
|
||||
aliasing = explicit
|
||||
[sqlfluff:rules:aliasing.column]
|
||||
aliasing = explicit
|
||||
[sqlfluff:rules:aliasing.length]
|
||||
min_alias_length = 3
|
||||
|
||||
@@ -59,10 +51,18 @@ min_alias_length = 3
|
||||
[sqlfluff:rules:capitalisation.keywords]
|
||||
capitalisation_policy = lower
|
||||
[sqlfluff:rules:capitalisation.identifiers]
|
||||
capitalisation_policy = lower
|
||||
extended_capitalisation_policy = lower
|
||||
[sqlfluff:rules:capitalisation.functions]
|
||||
extended_capitalisation_policy = lower
|
||||
[sqlfluff:rules:capitalisation.literals]
|
||||
capitalisation_policy = lower
|
||||
[sqlfluff:rules:capitalisation.types]
|
||||
extended_capitalisation_policy = lower
|
||||
|
||||
# The default configuration for the not equal convention rule is "consistent"
|
||||
# which will auto-detect the setting from the rest of the file. This
|
||||
# is less desirable in a new project and you may find this (slightly
|
||||
# more strict) setting more useful.
|
||||
[sqlfluff:rules:convention.not_equal]
|
||||
# Default to preferring the "c_style" (i.e. `!=`)
|
||||
preferred_not_equal_style = c_style
|
||||
@@ -5,9 +5,15 @@ list see the documentation:
|
||||
https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import configparser
|
||||
|
||||
# tomllib is only in the stdlib from 3.11+
|
||||
if sys.version_info >= (3, 11):
|
||||
import tomllib
|
||||
else: # pragma: no cover
|
||||
import toml as tomllib
|
||||
|
||||
# -- Path setup --------------------------------------------------------------
|
||||
|
||||
@@ -19,14 +25,14 @@ sys.path.append(os.path.abspath("./_ext"))
|
||||
|
||||
# Get the global config info as currently stated
|
||||
# (we use the config file to avoid actually loading any python here)
|
||||
config = configparser.ConfigParser()
|
||||
config.read(["../../setup.cfg"])
|
||||
stable_version = config.get("sqlfluff_docs", "stable_version")
|
||||
with open("../../pyproject.toml", "rb") as config_file:
|
||||
config = tomllib.load(config_file)
|
||||
stable_version = config.get("tool.sqlfluff_docs", "stable_version")
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
|
||||
project = "SQLFluff"
|
||||
copyright = "2023, Alan Cruickshank"
|
||||
copyright = "2024, Alan Cruickshank"
|
||||
author = "Alan Cruickshank"
|
||||
|
||||
# The full version, including alpha/beta/rc tags
|
||||
@@ -60,7 +66,7 @@ templates_path = ["_templates"]
|
||||
exclude_patterns = [
|
||||
# Exclude the partials folder, which is made up of files intended
|
||||
# to be included in others.
|
||||
"partials",
|
||||
"_partials",
|
||||
]
|
||||
|
||||
# Master doc
|
||||
@@ -94,24 +100,76 @@ html_theme_options = {
|
||||
"touch_icon": "images/sqlfluff-sm2-sq.png",
|
||||
"github_user": "sqlfluff",
|
||||
"github_repo": "sqlfluff",
|
||||
# GitHub Fork button
|
||||
"github_banner": True,
|
||||
# GitHub Fork button (points at a broken link, so disabling it)
|
||||
"github_banner": False,
|
||||
# GitHub star button
|
||||
"github_type": "star",
|
||||
# Use `"true"` instead of `True` for counting GitHub star, see https://ghbtns.com
|
||||
"github_count": "true",
|
||||
# Codecov button
|
||||
"codecov_button": True,
|
||||
}
|
||||
|
||||
# -- Options for redirects ---------------------------------------------
|
||||
# https://documatt.gitlab.io/sphinx-reredirects/usage.html
|
||||
|
||||
# Load the rule lists to generate rule permalinks
|
||||
with open("_partials/rule_list.json", "r") as rule_file:
|
||||
rule_list = json.load(rule_file)
|
||||
|
||||
redirects = {
|
||||
# There's an old link to /indentation in config files.
|
||||
# That should point to the layout section now.
|
||||
"indentation": "layout.html#configuring-indent-locations",
|
||||
"architecture": "internals.html#architecture",
|
||||
# Where there are references to the docs in any of the codebase (whether in
|
||||
# places like the README or in error messages), they should all reference
|
||||
# a perma link (to redirect). This ensures we can support a consistent
|
||||
# link location even if the docs move around.
|
||||
"perma/layout": "../configuration/layout.html",
|
||||
"perma/indent_locations": (
|
||||
"../configuration/layout.html#configuring-indent-locations"
|
||||
),
|
||||
"perma/hanging_indents": "../configuration/layout.html#hanging-indents",
|
||||
"perma/layout_spacing": (
|
||||
"../configuration/layout.html#configuring-layout-and-spacing"
|
||||
),
|
||||
"perma/configuration": "../configuration/index.html",
|
||||
"perma/dbt": "../configuration/templating/dbt.html",
|
||||
"perma/cli": "../reference/cli.html",
|
||||
"perma/rules": "../reference/rules.html",
|
||||
"perma/dialects": "../reference/dialects.html",
|
||||
"perma/architecture": "../guides/contributing/architecture.html",
|
||||
"perma/rule_disabling": (
|
||||
"../configuration/rule_configuration.html#enabling-and-disabling-rules"
|
||||
),
|
||||
"perma/internals": "../reference/internals/index.html",
|
||||
"perma/modularity": "../why_sqlfluff.html#modularity",
|
||||
"perma/indentation": "../configuration/layout.html#configuring-indent-locations",
|
||||
"perma/releasenotes": "../reference/releasenotes.html",
|
||||
"perma/why": "../why_sqlfluff.html",
|
||||
"perma/plugin_dev": "../guides/contributing/plugins.html",
|
||||
"perma/plugin_guide": "../guides/setup/developing_custom_rules.html",
|
||||
"perma/variables": "../configuration/templating/index.html",
|
||||
"perma/python_templating": "../configuration/templating/python.html",
|
||||
"perma/guides": "../guides/index.html",
|
||||
"perma/contribute_dialect_keywords": (
|
||||
"../guides/contributing/dialect.html#dialect_keywords"
|
||||
),
|
||||
# Add permalinks for rule codes
|
||||
**{
|
||||
f"perma/rule/{code}": (
|
||||
f"../../reference/rules.html#sqlfluff.rules.sphinx.Rule_{code}"
|
||||
)
|
||||
for code, _ in rule_list
|
||||
},
|
||||
# These are legacy links which used to exist in different parts of the
|
||||
# SQLFluff code base, and which we continue to support so those links
|
||||
# aren't dead ends. They should redirect to permalinks.
|
||||
"indentation": "perma/indentation.html",
|
||||
"architecture": "perma/architecture.html",
|
||||
"dialects": "perma/dialects.html",
|
||||
"internals": "perma/internals.html",
|
||||
"layout": "perma/layout.html",
|
||||
"releasenotes": "perma/releasenotes.html",
|
||||
"realworld": "perma/why.html",
|
||||
# This is a legacy link to support older versions of the VSCode plugin.
|
||||
# https://github.com/sqlfluff/vscode-sqlfluff/blob/master/src/features/providers/linter/actions/hover.ts
|
||||
"rules": "perma/rules.html",
|
||||
}
|
||||
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
36
docs/source/configuration/default_configuration.rst
Normal file
36
docs/source/configuration/default_configuration.rst
Normal file
@@ -0,0 +1,36 @@
|
||||
.. _defaultconfig:
|
||||
|
||||
Default Configuration
|
||||
---------------------
|
||||
|
||||
The default configuration is as follows, note the :ref:`builtin_jinja_blocks`
|
||||
in section :code:`[sqlfluff:templater:jinja:macros]` as referred to above.
|
||||
|
||||
.. note::
|
||||
|
||||
This shows the *entire* default config. **We do not recommend that users**
|
||||
**copy this whole config as the starter config file for their project**.
|
||||
|
||||
This is for two reasons:
|
||||
|
||||
#. The config file should act as a form of *documentation* for your team.
|
||||
A record of what decisions you've made which govern how your format your
|
||||
sql. By having a more concise config file, and only defining config settings
|
||||
where they differ from the defaults - you are more clearly stating to your
|
||||
team what choices you've made.
|
||||
|
||||
#. As the project evolves, the structure of the config file may change
|
||||
and we will attempt to make changes as backward compatible as possible.
|
||||
If you have not overridden a config setting in your project, we can
|
||||
easily update the default config to match your expected behaviour over time.
|
||||
We may also find issues with the default config which we can also fix
|
||||
in the background. *However*, the longer your local config file, the
|
||||
more work it will be to update and migrate your config file between
|
||||
major versions.
|
||||
|
||||
If you are starting a fresh project and are looking for a good *starter config*,
|
||||
check out the :ref:`starter_config` section above.
|
||||
|
||||
|
||||
.. literalinclude:: ../../../src/sqlfluff/core/default_config.cfg
|
||||
:language: cfg
|
||||
100
docs/source/configuration/ignoring_configuration.rst
Normal file
100
docs/source/configuration/ignoring_configuration.rst
Normal file
@@ -0,0 +1,100 @@
|
||||
.. _ignoreconfig:
|
||||
|
||||
Ignoring Errors & Files
|
||||
-----------------------
|
||||
|
||||
.. _inline_ignoring_errors:
|
||||
|
||||
Ignoring individual lines
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Similar to `flake8's ignore`_, individual lines can be ignored by adding
|
||||
:code:`-- noqa` to the end of the line. Additionally, specific rules can
|
||||
be ignored by quoting their code or the category.
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
-- Ignore all errors
|
||||
SeLeCt 1 from tBl ; -- noqa
|
||||
|
||||
-- Ignore rule CP02 & rule CP03
|
||||
SeLeCt 1 from tBl ; -- noqa: CP02,CP03
|
||||
|
||||
-- Ignore all parsing errors
|
||||
SeLeCt from tBl ; -- noqa: PRS
|
||||
|
||||
.. note::
|
||||
It should be noted that ignoring ``TMP`` and ``PRS`` errors can lead to
|
||||
incorrect ``sqlfluff lint`` and ``sqfluff fix`` results as `SQLFluff` can
|
||||
misinterpret the SQL being analysed.
|
||||
|
||||
.. _`flake8's ignore`: https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html#in-line-ignoring-errors
|
||||
|
||||
.. _inline_ignoring_ranges:
|
||||
|
||||
Ignoring line ranges
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Similar to `pylint's "pylint" directive"`_, ranges of lines can be ignored by
|
||||
adding :code:`-- noqa:disable=<rule>[,...] | all` to the line. Following this
|
||||
directive, specified rules (or all rules, if "all" was specified) will be
|
||||
ignored until a corresponding `-- noqa:enable=<rule>[,...] | all` directive.
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
-- Ignore rule AL02 from this line forward
|
||||
SELECT col_a a FROM foo -- noqa: disable=AL02
|
||||
|
||||
-- Ignore all rules from this line forward
|
||||
SELECT col_a a FROM foo -- noqa: disable=all
|
||||
|
||||
-- Enforce all rules from this line forward
|
||||
SELECT col_a a FROM foo -- noqa: enable=all
|
||||
|
||||
|
||||
.. _`pylint's "pylint" directive"`: http://pylint.pycqa.org/en/latest/user_guide/message-control.html
|
||||
|
||||
.. _sqlfluffignore:
|
||||
|
||||
:code:`.sqlfluffignore`
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Similar to `Git's`_ :code:`.gitignore` and `Docker's`_ :code:`.dockerignore`,
|
||||
SQLFluff supports a :ref:`sqlfluffignore` file to control which files are and
|
||||
aren't linted. Under the hood we use the python `pathspec library`_ which also
|
||||
has a brief tutorial in their documentation.
|
||||
|
||||
An example of a potential :ref:`sqlfluffignore` placed in the root of your
|
||||
project would be:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
# Comments start with a hash.
|
||||
|
||||
# Ignore anything in the "temp" path
|
||||
/temp/
|
||||
|
||||
# Ignore anything called "testing.sql"
|
||||
testing.sql
|
||||
|
||||
# Ignore any ".tsql" files
|
||||
*.tsql
|
||||
|
||||
Ignore files can also be placed in subdirectories of a path which is being
|
||||
linted and the sub files will also be applied within that subdirectory.
|
||||
|
||||
|
||||
.. _`Git's`: https://git-scm.com/docs/gitignore#_pattern_format
|
||||
.. _`Docker's`: https://docs.docker.com/engine/reference/builder/#dockerignore-file
|
||||
.. _`pathspec library`: https://python-path-specification.readthedocs.io/
|
||||
|
||||
Ignoring types of errors
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
General *categories* of errors can be ignored using the ``--ignore`` command
|
||||
line option or the ``ignore`` setting in :ref:`sqlfluffignore`. Types of errors
|
||||
that can be ignored include:
|
||||
|
||||
* :code:`lexing`
|
||||
* :code:`linting`
|
||||
* :code:`parsing`
|
||||
* :code:`templating`
|
||||
14
docs/source/configuration/index.rst
Normal file
14
docs/source/configuration/index.rst
Normal file
@@ -0,0 +1,14 @@
|
||||
.. _config:
|
||||
|
||||
Configuration
|
||||
=============
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
setting_configuration
|
||||
rule_configuration
|
||||
layout
|
||||
templating/index
|
||||
ignoring_configuration
|
||||
default_configuration
|
||||
@@ -1,7 +1,7 @@
|
||||
.. _layoutref:
|
||||
|
||||
Let's talk about whitespace
|
||||
===========================
|
||||
Layout & Whitespace Configuration
|
||||
=================================
|
||||
|
||||
If there is one part of building a linter that is going to be controversial
|
||||
it's going to be **whitespace** (closely followed by **cApiTaLiSaTiOn** 😁).
|
||||
173
docs/source/configuration/rule_configuration.rst
Normal file
173
docs/source/configuration/rule_configuration.rst
Normal file
@@ -0,0 +1,173 @@
|
||||
.. _ruleconfig:
|
||||
|
||||
Rule Configuration
|
||||
------------------
|
||||
|
||||
Rules can be configured with the :code:`.sqlfluff` config files.
|
||||
|
||||
Common rule configurations can be set in the :code:`[sqlfluff:rules]` section.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:rules]
|
||||
allow_scalar = True
|
||||
single_table_references = consistent
|
||||
unquoted_identifiers_policy = all
|
||||
|
||||
Rule specific configurations are set in rule specific subsections.
|
||||
|
||||
For example, enforce that keywords are upper case by configuring the rule
|
||||
:sqlfluff:ref:`CP01`:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:rules:capitalisation.keywords]
|
||||
# Keywords
|
||||
capitalisation_policy = upper
|
||||
|
||||
All possible options for rule sections are documented in :ref:`ruleref`.
|
||||
|
||||
For an overview of the most common rule configurations that you may want to
|
||||
tweak, see :ref:`defaultconfig` (and use :ref:`ruleref` to find the
|
||||
available alternatives).
|
||||
|
||||
.. _ruleselection:
|
||||
|
||||
Enabling and Disabling Rules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The decision as to which rules are applied to a given file is applied on a file
|
||||
by file basis, by the effective configuration for that file. There are two
|
||||
configuration values which you can use to set this:
|
||||
|
||||
* :code:`rules`, which explicitly *enables* the specified rules. If this
|
||||
parameter is unset or empty for a file, this implies "no selection" and
|
||||
so "all rules" is taken to be the meaning.
|
||||
* :code:`exclude_rules`, which explicitly *disables* the specified rules.
|
||||
This parameter is applied *after* the :code:`rules` parameter so can be
|
||||
used to *subtract* from the otherwise enabled set.
|
||||
|
||||
Each of these two configuration values accept a comma separated list of
|
||||
*references*. Each of those references can be:
|
||||
|
||||
* a rule *code* e.g. :code:`LN01`
|
||||
* a rule *name* e.g. :code:`layout.indent`
|
||||
* a rule *alias*, which is often a deprecated *code* e.g. :code:`L003`
|
||||
* a rule *group* e.g. :code:`layout` or :code:`capitalisation`
|
||||
|
||||
These different references can be mixed within a given expression, which
|
||||
results in a very powerful syntax for selecting exactly which rules are
|
||||
active for a given file.
|
||||
|
||||
.. note::
|
||||
|
||||
It's worth mentioning here that the application of :code:`rules` and
|
||||
:code:`exclude_rules`, with *groups*, *aliases* and *names*, in projects
|
||||
with potentially multiple nested configuration files defining different
|
||||
rules for different areas of a project can get very confusing very fast.
|
||||
While this flexibility is intended for users to take advantage of, we do
|
||||
have some recommendations about how to do this in a way that remains
|
||||
manageable.
|
||||
|
||||
When considering configuration inheritance, each of :code:`rules` and
|
||||
:code:`exclude_rules` will totally overwrite any values in parent config
|
||||
files if they are set in a child file. While the subtraction operation
|
||||
between both of them is calculated *"per file"*, there is no combination
|
||||
operation between two definitions of :code:`rules` (just one overwrites
|
||||
the other).
|
||||
|
||||
The effect of this is that we recommend one of two approaches:
|
||||
|
||||
#. Simply only use :code:`rules`. This has the upshot of each area of
|
||||
your project being very explicit in which rules are enabled. When
|
||||
that changes for part of your project you just reset the whole list
|
||||
of applicable rules for that part of the project.
|
||||
#. Set a single :code:`rules` value in your master project config file
|
||||
and then only use :code:`exclude_rules` in sub-configuration files
|
||||
to *turn off* specific rules for parts of the project where those
|
||||
rules are inappropriate. This keeps the simplicity of only having
|
||||
one value which is inherited, but allows slightly easier and simpler
|
||||
rollout of new rules because we manage by exception.
|
||||
|
||||
|
||||
For example, to disable the rules :sqlfluff:ref:`LT08`
|
||||
and :sqlfluff:ref:`RF02`:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
exclude_rules = LT08, RF02
|
||||
|
||||
To enable only specific rules, configure :code:`rules` accordingly.
|
||||
|
||||
For example, to enable :sqlfluff:ref:`RF02`:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
rules = RF02
|
||||
|
||||
Rules can also be enabled/disabled by their grouping. Right now, the only
|
||||
rule grouping is :code:`core`. This will enable (or disable) a select group
|
||||
of rules that have been deemed 'core rules'.
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
rules = core
|
||||
|
||||
More information about 'core rules' can be found in the :ref:`ruleref`.
|
||||
|
||||
Additionally, some rules have a special :code:`force_enable` configuration
|
||||
option, which allows to enable the given rule even for dialects where it is
|
||||
disabled by default. The rules that support this can be found in the
|
||||
:ref:`ruleref`.
|
||||
|
||||
The default values can be seen in :ref:`defaultconfig`.
|
||||
|
||||
See :ref:`ignoreconfig` for more information on how to ignore particular
|
||||
rules for specific lines, sections or files.
|
||||
|
||||
Downgrading rules to warnings
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To keep displaying violations for specific rules, but not have those
|
||||
issues lead to a failed run, rules can be downgraded to *warnings*.
|
||||
Rules set as *warnings* won't cause a file to fail, but will still
|
||||
be shown in the CLI to warn users of their presence.
|
||||
|
||||
The configuration of this behaves very like :code:`exclude_rules`
|
||||
above:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
warnings = LT01, LT04
|
||||
|
||||
With this configuration, files with no other issues (other than
|
||||
those set to warn) will pass. If there are still other issues, then
|
||||
the file will still fail, but will show both warnings and failures.
|
||||
|
||||
.. code-block::
|
||||
|
||||
== [test.sql] PASS
|
||||
L: 2 | P: 9 | LT01 | WARNING: Missing whitespace before +
|
||||
== [test2.sql] FAIL
|
||||
L: 2 | P: 8 | CP02 | Unquoted identifiers must be consistently upper case.
|
||||
L: 2 | P: 11 | LT01 | WARNING: Missing whitespace before +
|
||||
|
||||
This is particularly useful as a transitional tool when considering
|
||||
the introduction of new rules on a project where you might want to
|
||||
make users aware of issues without blocking their workflow (yet).
|
||||
|
||||
You can use either rule code or rule name for this setting.
|
||||
|
||||
Layout & Spacing Configuration
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The :code:`[sqlfluff:layout]` section of the config controls the
|
||||
treatment of spacing and line breaks across all rules. To understand
|
||||
more about this section, see the section of the docs dedicated to
|
||||
layout: :ref:`layoutconfig`.
|
||||
190
docs/source/configuration/setting_configuration.rst
Normal file
190
docs/source/configuration/setting_configuration.rst
Normal file
@@ -0,0 +1,190 @@
|
||||
.. _setting_config:
|
||||
|
||||
Setting Configuration
|
||||
=====================
|
||||
|
||||
SQLFluff accepts configuration either through the command line or
|
||||
through configuration files. There is *rough* parity between the
|
||||
two approaches with the exception that *templating* configuration
|
||||
must be done via a file, because it otherwise gets slightly complicated.
|
||||
|
||||
For details of what's available on the command line check out
|
||||
the :ref:`cliref`.
|
||||
|
||||
.. _`config-files`:
|
||||
|
||||
Configuration Files
|
||||
-------------------
|
||||
|
||||
For file based configuration *SQLFluff* will look for the following
|
||||
files in order. Later files (if found) will be used to overwrite
|
||||
any values read from earlier files.
|
||||
|
||||
- :code:`setup.cfg`
|
||||
- :code:`tox.ini`
|
||||
- :code:`pep8.ini`
|
||||
- :code:`.sqlfluff`
|
||||
- :code:`pyproject.toml`
|
||||
|
||||
Within these files, the first four will be read like a `cfg file`_, and
|
||||
*SQLFluff* will look for sections which start with :code:`sqlfluff`, and where
|
||||
subsections are delimited by a colon. For example the *jinjacontext*
|
||||
section will be indicated in the section started with
|
||||
:code:`[sqlfluff:jinjacontext]`.
|
||||
|
||||
For example, a snippet from a :code:`.sqlfluff` file (as well as any of the
|
||||
supported cfg file types):
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
templater = jinja
|
||||
sql_file_exts = .sql,.sql.j2,.dml,.ddl
|
||||
|
||||
[sqlfluff:indentation]
|
||||
indented_joins = False
|
||||
indented_using_on = True
|
||||
template_blocks_indent = False
|
||||
|
||||
[sqlfluff:templater]
|
||||
unwrap_wrapped_queries = True
|
||||
|
||||
[sqlfluff:templater:jinja]
|
||||
apply_dbt_builtins = True
|
||||
|
||||
For the `pyproject.toml file`_, all valid sections start with
|
||||
:code:`tool.sqlfluff` and subsections are delimited by a dot. For example the
|
||||
*jinjacontext* section will be indicated in the section started with
|
||||
:code:`[tool.sqlfluff.jinjacontext]`.
|
||||
|
||||
For example, a snippet from a :code:`pyproject.toml` file:
|
||||
|
||||
.. code-block:: toml
|
||||
|
||||
[tool.sqlfluff.core]
|
||||
templater = "jinja"
|
||||
sql_file_exts = ".sql,.sql.j2,.dml,.ddl"
|
||||
|
||||
[tool.sqlfluff.indentation]
|
||||
indented_joins = false
|
||||
indented_using_on = true
|
||||
template_blocks_indent = false
|
||||
|
||||
[tool.sqlfluff.templater]
|
||||
unwrap_wrapped_queries = true
|
||||
|
||||
[tool.sqlfluff.templater.jinja]
|
||||
apply_dbt_builtins = true
|
||||
|
||||
# For rule specific configuration, use dots between the names exactly
|
||||
# as you would in .sqlfluff. In the background, SQLFluff will unpack the
|
||||
# configuration paths accordingly.
|
||||
[tool.sqlfluff.rules.capitalisation.keywords]
|
||||
capitalisation_policy = "upper"
|
||||
|
||||
.. _`cfg file`: https://docs.python.org/3/library/configparser.html
|
||||
.. _`pyproject.toml file`: https://www.python.org/dev/peps/pep-0518/
|
||||
|
||||
|
||||
.. _starter_config:
|
||||
|
||||
New Project Configuration
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When setting up a new project with SQLFluff, we recommend keeping your
|
||||
configuration file fairly minimal. The config file should act as a form
|
||||
of *documentation* for your team i.e. a record of what decisions you've
|
||||
made which govern how you format your SQL. By having a more concise
|
||||
config file, and only defining config settings where they differ from the
|
||||
defaults - you are more clearly stating to your team what choices you've made.
|
||||
|
||||
*However*, there are also a few places where the *default* configuration
|
||||
is designed more for *existing projects*, rather than *fresh projects*, and
|
||||
so there is an opportunity to be a little stricter than you might otherwise
|
||||
be with an existing codebase.
|
||||
|
||||
Here is a simple configuration file which would be suitable for a starter
|
||||
project:
|
||||
|
||||
.. literalinclude:: /_partials/starter_config.cfg
|
||||
:language: cfg
|
||||
|
||||
|
||||
.. _nesting:
|
||||
|
||||
Nesting
|
||||
^^^^^^^
|
||||
|
||||
**SQLFluff** uses **nesting** in its configuration files, with files
|
||||
closer *overriding* (or *patching*, if you will) values from other files.
|
||||
That means you'll end up with a final config which will be a patchwork
|
||||
of all the values from the config files loaded up to that path. The exception
|
||||
to this is the value for `templater`, which cannot be set in config files in
|
||||
subdirectories of the working directory.
|
||||
You don't **need** any config files to be present to make *SQLFluff*
|
||||
work. If you do want to override any values though SQLFluff will use
|
||||
files in the following locations in order, with values from later
|
||||
steps overriding those from earlier:
|
||||
|
||||
0. *[...and this one doesn't really count]* There's a default config as
|
||||
part of the SQLFluff package. You can find this below, in the
|
||||
:ref:`defaultconfig` section.
|
||||
1. It will look in the user's os-specific app config directory.
|
||||
On macOS and Unix this is `~/.config/sqlfluff`, Windows is
|
||||
`<home>\\AppData\\Local\\sqlfluff\\sqlfluff`, for any of the filenames
|
||||
above in the main :ref:`setting_config` section. If multiple are present, they will
|
||||
*patch*/*override* each other in the order above.
|
||||
2. It will look for the same files in the user's home directory (~).
|
||||
3. *[if the current working directory is a subdirectory of the user's home directory (~)]*
|
||||
It will look for the same files in all directories between the
|
||||
user's home directory (~), and the current working directory.
|
||||
4. It will look for the same files in the current working directory.
|
||||
5. *[if parsing a file in a subdirectory of the current working directory]*
|
||||
It will look for the same files in every subdirectory between the
|
||||
current working dir and the file directory.
|
||||
6. It will look for the same files in the directory containing the file
|
||||
being linted.
|
||||
|
||||
This whole structure leads to efficient configuration, in particular
|
||||
in projects which utilise a lot of complicated templating.
|
||||
|
||||
.. _in_file_config:
|
||||
|
||||
In-File Configuration Directives
|
||||
--------------------------------
|
||||
|
||||
In addition to configuration files mentioned above, SQLFluff also supports
|
||||
comment based configuration switching in files. This allows specific SQL
|
||||
files to modify a default configuration if they have specific needs.
|
||||
|
||||
When used, these apply to the whole file, and are parsed from the file in
|
||||
an initial step before the rest of the file is properly parsed. This means
|
||||
they can be used for both rule configuration and also for parsing
|
||||
configuration.
|
||||
|
||||
To use these, the syntax must start as an *inline sql comment* beginning
|
||||
with :code:`sqlfluff` (i.e. :code:`-- sqlfluff`). The line is then interpreted
|
||||
as a colon-separated address of the configuration value you wish to set.
|
||||
A few common examples are shown below:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
-- Set Indented Joins
|
||||
-- sqlfluff:indentation:indented_joins:True
|
||||
|
||||
-- Set a smaller indent for this file
|
||||
-- sqlfluff:indentation:tab_space_size:2
|
||||
|
||||
-- Set keywords to be capitalised
|
||||
-- sqlfluff:rules:capitalisation.keywords:capitalisation_policy:upper
|
||||
|
||||
SELECT *
|
||||
FROM a
|
||||
JOIN b USING(c)
|
||||
|
||||
We recommend only using this configuration approach for configuration that
|
||||
applies to one file in isolation. For configuration changes for areas of
|
||||
a project or for whole projects we recommend :ref:`nesting` of configuration
|
||||
files.
|
||||
|
||||
This syntax is very similar to the method for :ref:`inline_ignoring_errors`.
|
||||
146
docs/source/configuration/templating/dbt.rst
Normal file
146
docs/source/configuration/templating/dbt.rst
Normal file
@@ -0,0 +1,146 @@
|
||||
.. _dbt_templater:
|
||||
|
||||
:code:`dbt` templater
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. note::
|
||||
From sqlfluff version 0.7.0 onwards, the dbt templater has been moved
|
||||
to a separate plugin and python package. Projects that were already using
|
||||
the dbt templater may initially fail after an upgrade to 0.7.0+. See the
|
||||
installation instructions below to install the dbt templater.
|
||||
|
||||
dbt templating is still a relatively new feature added in 0.4.0 and
|
||||
is still in very active development! If you encounter an issue, please
|
||||
let us know in a GitHub issue or on the SQLFluff slack workspace.
|
||||
|
||||
:code:`dbt` is not the default templater for *SQLFluff* (it is :code:`jinja`).
|
||||
:code:`dbt` is a complex tool, so using the default :code:`jinja` templater
|
||||
will be simpler. You should be aware when using the :code:`dbt` templater that
|
||||
you will be exposed to some of the complexity of :code:`dbt`. Users may wish to
|
||||
try both templaters and choose according to how they intend to use *SQLFluff*.
|
||||
|
||||
A simple rule of thumb might be:
|
||||
|
||||
- If you are using *SQLFluff* in a CI/CD context, where speed is not
|
||||
critical but accuracy in rendering sql is, then the `dbt` templater
|
||||
may be more appropriate.
|
||||
- If you are using *SQLFluff* in an IDE or on a git hook, where speed
|
||||
of response may be more important, then the `jinja` templater may
|
||||
be more appropriate.
|
||||
|
||||
Pros:
|
||||
|
||||
* Most (potentially all) macros will work
|
||||
|
||||
Cons:
|
||||
|
||||
* More complex, e.g. using it successfully may require deeper
|
||||
understanding of your models and/or macros (including third-party macros)
|
||||
|
||||
* More configuration decisions to make
|
||||
* Best practices are not yet established or documented
|
||||
|
||||
* If your :code:`dbt` model files access a database at compile time, using
|
||||
SQLFluff with the :code:`dbt` templater will **also** require access to a
|
||||
database.
|
||||
|
||||
* Note that you can often point SQLFluff and the :code:`dbt` templater at a
|
||||
test database (i.e. it doesn't have to be the production database).
|
||||
|
||||
* Runs slower
|
||||
|
||||
Installation & Configuration
|
||||
""""""""""""""""""""""""""""
|
||||
|
||||
In order to get started using *SQLFluff* with a dbt project you will
|
||||
first need to install the relevant `dbt adapter`_ for your dialect
|
||||
and the :code:`sqlfluff-templater-dbt` package using
|
||||
your package manager of choice (e.g.
|
||||
:code:`pip install dbt-postgres sqlfluff-templater-dbt`) and then will need the
|
||||
following configuration:
|
||||
|
||||
.. _`dbt adapter`: https://docs.getdbt.com/docs/available-adapters
|
||||
|
||||
In *.sqlfluff*:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
templater = dbt
|
||||
|
||||
In *.sqlfluffignore*:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
target/
|
||||
# dbt <1.0.0
|
||||
dbt_modules/
|
||||
# dbt >=1.0.0
|
||||
dbt_packages/
|
||||
macros/
|
||||
|
||||
You can set the dbt project directory, profiles directory and profile with:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:dbt]
|
||||
project_dir = <relative or absolute path to dbt_project directory>
|
||||
profiles_dir = <relative or absolute path to the directory that contains the profiles.yml file>
|
||||
profile = <dbt profile>
|
||||
target = <dbt target>
|
||||
|
||||
.. note::
|
||||
|
||||
If the `profiles_dir` setting is omitted, SQLFluff will look for the profile
|
||||
in the default location, which varies by operating system. On Unix-like
|
||||
operating systems (e.g. Linux or macOS), the default profile directory is
|
||||
`~/.dbt/`. On Windows, you can determine your default profile directory by
|
||||
running `dbt debug --config-dir`.
|
||||
|
||||
To use builtin dbt Jinja functions SQLFluff provides a configuration option
|
||||
that enables usage within templates.
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja]
|
||||
apply_dbt_builtins = True
|
||||
|
||||
This will provide dbt macros like `ref`, `var`, `is_incremental()`. If the need
|
||||
arises builtin dbt macros can be customised via Jinja macros in `.sqlfluff`
|
||||
configuration file.
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja:macros]
|
||||
# Macros provided as builtins for dbt projects
|
||||
dbt_ref = {% macro ref(model_ref) %}{{model_ref}}{% endmacro %}
|
||||
dbt_source = {% macro source(source_name, table) %}{{source_name}}_{{table}}{% endmacro %}
|
||||
dbt_config = {% macro config() %}{% for k in kwargs %}{% endfor %}{% endmacro %}
|
||||
dbt_var = {% macro var(variable, default='') %}item{% endmacro %}
|
||||
dbt_is_incremental = {% macro is_incremental() %}True{% endmacro %}
|
||||
|
||||
If your project requires that you pass variables to dbt through command line,
|
||||
you can specify them in the `templater:dbt:context` section of `.sqlfluff`.
|
||||
See below configuration and its equivalent dbt command:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:dbt:context]
|
||||
my_variable = 1
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
dbt run --vars '{"my_variable": 1}'
|
||||
|
||||
Known Caveats
|
||||
"""""""""""""
|
||||
|
||||
- To use the dbt templater, you must set `templater = dbt` in the `.sqlfluff`
|
||||
config file in the directory where sqlfluff is run. The templater cannot
|
||||
be changed in `.sqlfluff` files in subdirectories.
|
||||
- In SQLFluff 0.4.0 using the dbt templater requires that all files
|
||||
within the root and child directories of the dbt project must be part
|
||||
of the project. If there are deployment scripts which refer to SQL files
|
||||
not part of the project for instance, this will result in an error.
|
||||
You can overcome this by adding any non-dbt project SQL files to
|
||||
.sqlfluffignore.
|
||||
100
docs/source/configuration/templating/index.rst
Normal file
100
docs/source/configuration/templating/index.rst
Normal file
@@ -0,0 +1,100 @@
|
||||
.. _templateconfig:
|
||||
|
||||
Templating Configuration
|
||||
------------------------
|
||||
|
||||
This section explains how to configure templating for SQL files.
|
||||
|
||||
When writing SQL files, users might utilise some kind of templating.
|
||||
The SQL file itself is written with placeholders which get rendered to proper
|
||||
SQL at run time.
|
||||
This can range from very simple placeholder templating to complex Jinja
|
||||
templating.
|
||||
|
||||
SQLFluff supports templated sections in SQL, see :ref:`templater`.
|
||||
This is achieved by the following set of operations:
|
||||
|
||||
1. SQLFluff pre-renders the templated SQL
|
||||
2. SQLFluff applies the lint and fix operations to the rendered file
|
||||
3. SQLFluff backports the rule violations to the templated section of the SQL.
|
||||
|
||||
SQLFluff does not automatically have access to the same environment used in
|
||||
production template setup. This means it is necessary to either provide that
|
||||
environment or provide dummy values to effectively render the template and
|
||||
generate valid SQL. Refer to the templater sections below for details.
|
||||
|
||||
SQLFluff natively supports the following templating engines
|
||||
|
||||
- :ref:`jinja_templater`
|
||||
- :ref:`placeholder_templater`
|
||||
- :ref:`python_templater`
|
||||
|
||||
Also, SQLFluff has an integration to use :code:`dbt` as a templater.
|
||||
|
||||
- :ref:`dbt_templater` (via plugin which is covered in a different section).
|
||||
|
||||
.. note::
|
||||
|
||||
Templaters may not be able to generate a rendered SQL that covers
|
||||
the entire raw file.
|
||||
|
||||
For example, if the raw SQL uses a :code:`{% if condition %}` block,
|
||||
the rendered version of the template will only include either the
|
||||
:code:`{% then %}` or the :code:`{% else %}` block (depending on the
|
||||
provided configuration for the templater), but not both.
|
||||
|
||||
In this case, because SQLFluff linting can only operate on the output
|
||||
of the templater, some areas of the raw SQL will never be seen by the
|
||||
linter and will not be covered by lint rules.
|
||||
|
||||
This is functionality we hope to support in future.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Templater Specific Configuration:
|
||||
|
||||
jinja
|
||||
placeholder
|
||||
python
|
||||
dbt
|
||||
|
||||
.. _generic_variable_templating:
|
||||
|
||||
Generic Variable Templating
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Variables are available in all the templaters.
|
||||
By default the templating engine will expect variables for templating to be
|
||||
available in the config, and the templater will look in the section
|
||||
corresponding to the context for that templater. By convention, the config for
|
||||
the ``jinja`` templater is found in the ``sqlfluff:templater:jinja:context``
|
||||
section, the config for the ``python`` templater is found in the
|
||||
``sqlfluff:templater:python:context`` section, the one for the ``placeholder``
|
||||
templater is found in the ``sqlfluff:templater:placeholder:context`` section.
|
||||
|
||||
For example, if passed the following *.sql* file:
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
SELECT {{ num_things }} FROM {{ tbl_name }} WHERE id > 10 LIMIT 5
|
||||
|
||||
...and the following configuration in *.sqlfluff* in the same directory:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja:context]
|
||||
num_things=456
|
||||
tbl_name=my_table
|
||||
|
||||
...then before parsing, the sql will be transformed to:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
SELECT 456 FROM my_table WHERE id > 10 LIMIT 5
|
||||
|
||||
.. note::
|
||||
|
||||
If there are variables in the template which cannot be found in
|
||||
the current configuration context, then this will raise a
|
||||
`SQLTemplatingError` and this will appear as a violation without
|
||||
a line number, quoting the name of the variable that couldn't be found.
|
||||
417
docs/source/configuration/templating/jinja.rst
Normal file
417
docs/source/configuration/templating/jinja.rst
Normal file
@@ -0,0 +1,417 @@
|
||||
.. _jinja_templater:
|
||||
|
||||
Jinja templater
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
The Jinja templater uses Jinja2_ to render templates.
|
||||
|
||||
.. _Jinja2: https://jinja.palletsprojects.com/
|
||||
|
||||
There are multiple, complementary ways of configuring the Jinja templater.
|
||||
|
||||
- Reading variables and Jinja macros directly from the SQLFluff config file
|
||||
- Loading macros from a path
|
||||
- Using a library
|
||||
|
||||
.. list-table:: Overview of Jinja templater's configuration options
|
||||
:header-rows: 1
|
||||
|
||||
* - Configuration
|
||||
- Variables
|
||||
- Macros
|
||||
- Filters
|
||||
- Documentation
|
||||
* - Config file
|
||||
- ✅
|
||||
- ✅
|
||||
- ❌
|
||||
- `Complex Jinja Variable Templating`_ and `Jinja Macro Templating (from config)`_
|
||||
* - Macro Path
|
||||
- ❌
|
||||
- ✅
|
||||
- ❌
|
||||
- `Jinja Macro Templating (from file)`_
|
||||
* - Library
|
||||
- ✅
|
||||
- ✅
|
||||
- ✅
|
||||
- `Library Templating`_
|
||||
|
||||
For example, a snippet from a :code:`.sqlfluff` file that uses all config
|
||||
options:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
templater = jinja
|
||||
|
||||
[sqlfluff:templater:jinja]
|
||||
apply_dbt_builtins = True
|
||||
load_macros_from_path = my_macros
|
||||
loader_search_path = included_templates
|
||||
library_path = sqlfluff_libs
|
||||
exclude_macros_from_path = my_macros_exclude
|
||||
|
||||
[sqlfluff:templater:jinja:context]
|
||||
my_list = ['a', 'b', 'c']
|
||||
MY_LIST = ("d", "e", "f")
|
||||
my_where_dict = {"field_1": 1, "field_2": 2}
|
||||
|
||||
[sqlfluff:templater:jinja:macros]
|
||||
a_macro_def = {% macro my_macro(n) %}{{ n }} + {{ n * 2 }}{% endmacro %}
|
||||
|
||||
Complex Jinja Variable Templating
|
||||
"""""""""""""""""""""""""""""""""
|
||||
|
||||
Apart from the Generic variable templating that is supported for all
|
||||
templaters, two more advanced features of variable templating are available for
|
||||
Jinja.
|
||||
|
||||
*case sensitivity* and *native python types*.
|
||||
Both are illustrated in the following example:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja:context]
|
||||
my_list = ['a', 'b', 'c']
|
||||
MY_LIST = ("d", "e", "f")
|
||||
my_where_dict = {"field_1": 1, "field_2": 2}
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
SELECT
|
||||
{% for elem in MY_LIST %}
|
||||
'{{elem}}' {% if not loop.last %}||{% endif %}
|
||||
{% endfor %} as concatenated_list
|
||||
FROM tbl
|
||||
WHERE
|
||||
{% for field, value in my_where_dict.items() %}
|
||||
{{field}} = {{value}} {% if not loop.last %}and{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
...will render as...
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
SELECT
|
||||
'd' || 'e' || 'f' as concatenated_list
|
||||
FROM tbl
|
||||
WHERE
|
||||
field_1 = 1 and field_2 = 2
|
||||
|
||||
Note that the variable was replaced in a case sensitive way and that the
|
||||
settings in the config file were interpreted as native python types.
|
||||
|
||||
Jinja Macro Templating (from config)
|
||||
""""""""""""""""""""""""""""""""""""
|
||||
|
||||
Macros (which also look and feel like *functions*) are available only in the
|
||||
*jinja* templater. Similar to :ref:`generic_variable_templating`, these are
|
||||
specified in config files, what's different in this case is how they are named.
|
||||
Similar to the *context* section above, macros are configured separately in the
|
||||
*macros* section of the config.
|
||||
Consider the following example.
|
||||
|
||||
If passed the following *.sql* file:
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
SELECT {{ my_macro(6) }} FROM some_table
|
||||
|
||||
...and the following configuration in *.sqlfluff* in the same directory (note
|
||||
the tight control of whitespace):
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja:macros]
|
||||
a_macro_def = {% macro my_macro(n) %}{{ n }} + {{ n * 2 }}{% endmacro %}
|
||||
|
||||
...then before parsing, the sql will be transformed to:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
SELECT 6 + 12 FROM some_table
|
||||
|
||||
Note that in the code block above, the variable name in the config is
|
||||
*a_macro_def*, and this isn't apparently otherwise used anywhere else.
|
||||
Broadly this is accurate, however within the configuration loader this will
|
||||
still be used to overwrite previous *values* in other config files. As such
|
||||
this introduces the idea of config *blocks* which could be selectively
|
||||
overwritten by other configuration files downstream as required.
|
||||
|
||||
Jinja Macro Templating (from file)
|
||||
""""""""""""""""""""""""""""""""""
|
||||
|
||||
In addition to macros specified in the config file, macros can also be
|
||||
loaded from files or folders. This is specified in the config file:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja]
|
||||
load_macros_from_path = my_macros,other_macros
|
||||
|
||||
``load_macros_from_path`` is a comma-separated list of :code:`.sql` files or
|
||||
folders. Locations are *relative to the config file*. For example, if the
|
||||
config file above was found at :code:`/home/my_project/.sqlfluff`, then
|
||||
SQLFluff will look for macros in the folders :code:`/home/my_project/my_macros/`
|
||||
and :code:`/home/my_project/other_macros/`, including any of their subfolders.
|
||||
Any macros defined in the config will always take precedence over a macro
|
||||
defined in the path.
|
||||
|
||||
``exclude_macros_from_path`` works in the same manner as ``load_macros_from_path`` but
|
||||
allows you to have sqlfluff ignore certain macros. This can be useful if you have
|
||||
custom jinja tags.
|
||||
|
||||
Macros loaded from these files are available in every :code:`.sql` file without
|
||||
requiring a Jinja :code:`include` or :code:`import`. They are loaded into the
|
||||
`Jinja Global Namespace <https://jinja.palletsprojects.com/en/3.1.x/api/#global-namespace>`_.
|
||||
|
||||
**Note:** The :code:`load_macros_from_path` setting also defines the search
|
||||
path for Jinja
|
||||
`include <https://jinja.palletsprojects.com/en/3.1.x/templates/#include>`_ or
|
||||
`import <https://jinja.palletsprojects.com/en/3.1.x/templates/#import>`_.
|
||||
As with loaded macros, subdirectories are also supported. For example,
|
||||
if :code:`load_macros_from_path` is set to :code:`my_macros`, and there is a
|
||||
file :code:`my_macros/subdir/my_file.sql`, you can do:
|
||||
|
||||
.. code-block:: jinja
|
||||
|
||||
{% include 'subdir/my_file.sql' %}
|
||||
|
||||
If you would like to define the Jinja search path without also loading the
|
||||
macros into the global namespace, use the :code:`loader_search_path` setting
|
||||
instead.
|
||||
|
||||
.. note::
|
||||
|
||||
Throughout the templating process **whitespace** will still be treated
|
||||
rigorously, and this includes **newlines**. In particular you may choose
|
||||
to provide *dummy* macros in your configuration different from the actual
|
||||
macros used in production.
|
||||
|
||||
**REMEMBER:** The reason SQLFluff supports macros is to *enable* it to parse
|
||||
templated sql without it being a blocker. It shouldn't be a requirement that
|
||||
the *templating* is accurate - it only needs to work well enough that
|
||||
*parsing* and *linting* are helpful.
|
||||
|
||||
.. _builtin_jinja_blocks:
|
||||
|
||||
Builtin Jinja Macro Blocks
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
One of the main use cases which inspired *SQLFluff* as a project was `dbt`_.
|
||||
It uses jinja templating extensively and leads to some users maintaining large
|
||||
repositories of sql files which could potentially benefit from some linting.
|
||||
|
||||
.. note::
|
||||
*SQLFluff* now has a tighter integration with dbt through the "dbt" templater.
|
||||
It is the recommended templater for dbt projects. If used, it eliminates the
|
||||
need for the overrides described in this section.
|
||||
|
||||
To use the dbt templater, go to :ref:`dbt_templater`.
|
||||
|
||||
*SQLFluff* anticipates this use case and provides some built in macro blocks
|
||||
in the :ref:`defaultconfig` which assist in getting started with `dbt`_
|
||||
projects. In particular it provides mock objects for:
|
||||
|
||||
* *ref*: The mock version of this provided simply returns the model reference
|
||||
as the name of the table. In most cases this is sufficient.
|
||||
* *config*: A regularly used macro in `dbt`_ to set configuration values. For
|
||||
linting purposes, this makes no difference and so the provided macro simply
|
||||
returns nothing.
|
||||
|
||||
.. note::
|
||||
If there are other builtin macros which would make your life easier,
|
||||
consider submitting the idea (or even better a pull request) on `github`_.
|
||||
|
||||
.. _`dbt`: https://www.getdbt.com/
|
||||
.. _`github`: https://www.github.com/sqlfluff/sqlfluff
|
||||
|
||||
.. _jinja_library_templating:
|
||||
|
||||
Library Templating
|
||||
""""""""""""""""""
|
||||
|
||||
If using *SQLFluff* with jinja as your templater, you may have library
|
||||
function calls within your sql files that can not be templated via the
|
||||
normal macro templating mechanisms:
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
SELECT foo, bar FROM baz {{ dbt_utils.group_by(2) }}
|
||||
|
||||
To template these libraries, you can use the `sqlfluff:jinja:library_path`
|
||||
config option:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja]
|
||||
library_path = sqlfluff_libs
|
||||
|
||||
This will pull in any python modules from that directory and allow sqlfluff
|
||||
to use them in templates. In the above example, you might define a file at
|
||||
`sqlfluff_libs/dbt_utils.py` as:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def group_by(n):
|
||||
return "GROUP BY 1,2"
|
||||
|
||||
|
||||
If an `__init__.py` is detected, it will be loaded alongside any modules and
|
||||
submodules found within the library path.
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
SELECT
|
||||
{{ custom_sum('foo', 'bar') }},
|
||||
{{ foo.bar.another_sum('foo', 'bar') }}
|
||||
FROM
|
||||
baz
|
||||
|
||||
`sqlfluff_libs/__init__.py`:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def custom_sum(a: str, b: str) -> str:
|
||||
return a + b
|
||||
|
||||
`sqlfluff_libs/foo/__init__.py`:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# empty file
|
||||
|
||||
`sqlfluff_libs/foo/bar.py`:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def another_sum(a: str, b: str) -> str:
|
||||
return a + b
|
||||
|
||||
Additionally, the library can be used to expose `Jinja Filters <https://jinja.palletsprojects.com/en/3.1.x/templates/#filters>`_
|
||||
to the Jinja environment used by SQLFluff.
|
||||
|
||||
This is achieved by setting a global variable named ``SQLFLUFF_JINJA_FILTERS``.
|
||||
``SQLFLUFF_JINJA_FILTERS`` is a dictionary where
|
||||
|
||||
* dictionary keys map to the Jinja filter name
|
||||
* dictionary values map to the Python callable
|
||||
|
||||
For example, to make the Airflow filter ``ds`` available to SQLFluff, add
|
||||
the following to the `__init__.py` of the library:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# https://github.com/apache/airflow/blob/main/airflow/templates.py#L53
|
||||
def ds_filter(value: datetime.date | datetime.time | None) -> str | None:
|
||||
"""Date filter."""
|
||||
if value is None:
|
||||
return None
|
||||
return value.strftime("%Y-%m-%d")
|
||||
|
||||
SQLFLUFF_JINJA_FILTERS = {"ds": ds_filter}
|
||||
|
||||
Now, ``ds`` can be used in SQL
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
SELECT "{{ "2000-01-01" | ds }}";
|
||||
|
||||
Jinja loader search path
|
||||
""""""""""""""""""""""""
|
||||
|
||||
The Jinja environment can be configured to search for files included with
|
||||
`include <https://jinja.palletsprojects.com/en/3.1.x/templates/#include>`_ or
|
||||
`import <https://jinja.palletsprojects.com/en/3.1.x/templates/#import>`_ in a
|
||||
list of folders. This is specified in the config file:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:jinja]
|
||||
loader_search_path = included_templates,other_templates
|
||||
|
||||
``loader_search_path`` is a comma-separated list of folders. Locations are
|
||||
*relative to the config file*. For example, if the config file above was found
|
||||
at :code:`/home/my_project/.sqlfluff`, then SQLFluff will look for included
|
||||
files in the folders :code:`/home/my_project/included_templates/` and
|
||||
:code:`/home/my_project/other_templates/`, including any of their subfolders.
|
||||
For example, this will read from
|
||||
:code:`/home/my_project/included_templates/my_template.sql`:
|
||||
|
||||
.. code-block:: jinja
|
||||
|
||||
{% include 'included_templates/my_template.sql' %}
|
||||
|
||||
Any folders specified in the :code:`load_macros_from_path` setting are
|
||||
automatically appended to the ``loader_search_path``. It is not necessary to
|
||||
specify a given directory in both settings.
|
||||
|
||||
Unlike the :code:`load_macros_from_path` setting, any macros within these
|
||||
folders are *not* automatically loaded into the global namespace. They must be
|
||||
explicitly imported using the
|
||||
`import <https://jinja.palletsprojects.com/en/3.1.x/templates/#import>`_ Jinja
|
||||
directive. If you would like macros to be automatically included in the
|
||||
global Jinja namespace, use the :code:`load_macros_from_path` setting instead.
|
||||
|
||||
Interaction with ``--ignore=templating``
|
||||
""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
Ignoring Jinja templating errors provides a way for users to use SQLFluff
|
||||
while reducing or avoiding the need to spend a lot of time adding variables
|
||||
to ``[sqlfluff:templater:jinja:context]``.
|
||||
|
||||
When ``--ignore=templating`` is enabled, the Jinja templater behaves a bit
|
||||
differently. This additional behavior is *usually* but not *always* helpful
|
||||
for making the file at least partially parsable and fixable. It definitely
|
||||
doesn’t **guarantee** that every file can be fixed, but it’s proven useful for
|
||||
some users.
|
||||
|
||||
Here's how it works:
|
||||
|
||||
* Within the expanded SQL, undefined variables are automatically *replaced*
|
||||
with the corresponding string value.
|
||||
* If you do: ``{% include query %}``, and the variable ``query`` is not
|
||||
defined, it returns a “file” containing the string ``query``.
|
||||
* If you do: ``{% include "query_file.sql" %}``, and that file does not exist
|
||||
or you haven’t configured a setting for ``load_macros_from_path`` or
|
||||
``loader_search_path``, it returns a “file” containing the text
|
||||
``query_file``.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
select {{ my_variable }}
|
||||
from {% include "my_table.sql" %}
|
||||
|
||||
is interpreted as:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
select my_variable
|
||||
from my_table
|
||||
|
||||
The values provided by the Jinja templater act *a bit* (not exactly) like a
|
||||
mixture of several types:
|
||||
|
||||
* ``str``
|
||||
* ``int``
|
||||
* ``list``
|
||||
* Jinja's ``Undefined`` `class <https://jinja.palletsprojects.com/en/3.1.x/api/#jinja2.Undefined>`_
|
||||
|
||||
Because the values behave like ``Undefined``, it's possible to replace them
|
||||
using Jinja's ``default()`` `filter <https://jinja.palletsprojects.com/en/3.1.x/templates/#jinja-filters.default>`_.
|
||||
For example:
|
||||
|
||||
.. code-block:: SQL+Jinja
|
||||
|
||||
select {{ my_variable | default("col_a") }}
|
||||
from my_table
|
||||
|
||||
is interpreted as:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
select col_a
|
||||
from my_table
|
||||
106
docs/source/configuration/templating/placeholder.rst
Normal file
106
docs/source/configuration/templating/placeholder.rst
Normal file
@@ -0,0 +1,106 @@
|
||||
.. _placeholder_templater:
|
||||
|
||||
Placeholder templater
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Libraries such as SQLAlchemy or Psycopg use different parameter placeholder
|
||||
styles to mark where a parameter has to be inserted in the query.
|
||||
|
||||
For example a query in SQLAlchemy can look like this:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
SELECT * FROM table WHERE id = :myid
|
||||
|
||||
At runtime `:myid` will be replaced by a value provided by the application and
|
||||
escaped as needed, but this is not standard SQL and cannot be parsed as is.
|
||||
|
||||
In order to parse these queries, it is then necessary to replace these
|
||||
placeholders with sample values, and this is done with the placeholder
|
||||
templater.
|
||||
|
||||
Placeholder templating can be enabled in the config using:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
templater = placeholder
|
||||
|
||||
A few common styles are supported:
|
||||
|
||||
.. code-block:: sql
|
||||
:force:
|
||||
|
||||
-- colon
|
||||
WHERE bla = :my_name
|
||||
|
||||
-- colon_nospaces
|
||||
-- (use with caution as more prone to false positives)
|
||||
WHERE bla = table:my_name
|
||||
|
||||
-- colon_optional_quotes
|
||||
SELECT :"column" FROM :table WHERE bla = :'my_name'
|
||||
|
||||
-- numeric_colon
|
||||
WHERE bla = :2
|
||||
|
||||
-- pyformat
|
||||
WHERE bla = %(my_name)s
|
||||
|
||||
-- dollar
|
||||
WHERE bla = $my_name or WHERE bla = ${my_name}
|
||||
|
||||
-- question_mark
|
||||
WHERE bla = ?
|
||||
|
||||
-- numeric_dollar
|
||||
WHERE bla = $3 or WHERE bla = ${3}
|
||||
|
||||
-- percent
|
||||
WHERE bla = %s
|
||||
|
||||
-- ampersand
|
||||
WHERE bla = &s or WHERE bla = &{s} or USE DATABASE MARK_{ENV}
|
||||
|
||||
These can be configured by setting `param_style` to the names above:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:placeholder]
|
||||
param_style = colon
|
||||
my_name = 'john'
|
||||
|
||||
then you can set sample values for each parameter, like `my_name`
|
||||
above. Notice that the value needs to be escaped as it will be replaced as a
|
||||
string during parsing. When the sample values aren't provided, the templater
|
||||
will use parameter names themselves by default.
|
||||
|
||||
When parameters are positional, like `question_mark`, then their name is
|
||||
simply the order in which they appear, starting with `1`.
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:placeholder]
|
||||
param_style = question_mark
|
||||
1 = 'john'
|
||||
|
||||
In case you need a parameter style different from the ones above, you can pass
|
||||
a custom regex.
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:placeholder]
|
||||
param_regex = __(?P<param_name>[\w_]+)__
|
||||
my_name = 'john'
|
||||
|
||||
N.B. quotes around `param_regex` in the config are
|
||||
interpreted literally by the templater.
|
||||
e.g. `param_regex='__(?P<param_name>[\w_]+)__'` matches
|
||||
`'__some_param__'` not `__some_param__`
|
||||
|
||||
the named parameter `param_name` will be used as the key to replace, if
|
||||
missing, the parameter is assumed to be positional and numbers are used
|
||||
instead.
|
||||
|
||||
Also consider making a pull request to the project to have your style added,
|
||||
it may be useful to other people and simplify your configuration.
|
||||
58
docs/source/configuration/templating/python.rst
Normal file
58
docs/source/configuration/templating/python.rst
Normal file
@@ -0,0 +1,58 @@
|
||||
.. _python_templater:
|
||||
|
||||
Python templater
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
Uses native Python f-strings. As described in
|
||||
:ref:`generic_variable_templating`, an example usage would be
|
||||
configured as follows:
|
||||
|
||||
If passed the following *.sql* file:
|
||||
|
||||
.. code-block::
|
||||
|
||||
SELECT * FROM {tbl_name}
|
||||
|
||||
...and the following configuration in *.sqlfluff* in the same directory:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff]
|
||||
templater = python
|
||||
|
||||
[sqlfluff:templater:python:context]
|
||||
tbl_name = my_table
|
||||
|
||||
...then before parsing, the sql will be transformed to:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
SELECT * FROM my_table
|
||||
|
||||
|
||||
Complex Python Variable Templating
|
||||
""""""""""""""""""""""""""""""""""""
|
||||
|
||||
`Python string formatting`_ supports accessing object attributes
|
||||
via dot notation (e.g. :code:`{foo.bar}`). However, since we cannot create Python
|
||||
objects within configuration files, we need a workaround in order to provide
|
||||
dummy values to render templates containing these values. The SQLFluff
|
||||
python templater will interpret any variable containing a "." as a
|
||||
dictionary lookup on the *magic* fixed context key :code:`sqlfluff`.
|
||||
|
||||
.. code-block::
|
||||
|
||||
-- this SQL
|
||||
SELECT * FROM {foo.bar}
|
||||
|
||||
-- becomes this
|
||||
SELECT * FROM {sqlfluff["foo.bar"]}
|
||||
|
||||
...which can be populated using the following configuration:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:templater:python:context]
|
||||
sqlfluff = {"foo.bar": "abc"}
|
||||
|
||||
.. _`Python string formatting`: https://docs.python.org/3/library/string.html#format-string-syntax
|
||||
@@ -1,249 +0,0 @@
|
||||
.. _dialectref:
|
||||
|
||||
Dialects Reference
|
||||
==================
|
||||
|
||||
SQLFluff is designed to be flexible in supporting a variety of dialects.
|
||||
Not all potential dialects are supported so far, but several have been
|
||||
implemented by the community. Below are a list of the currently available
|
||||
dialects. Each inherits from another, up to the root `ansi` dialect.
|
||||
|
||||
For a canonical list of supported dialects, run the
|
||||
:program:`sqlfluff dialects` command, which will output a list of the
|
||||
current dialects available on your installation of SQLFluff.
|
||||
|
||||
.. note::
|
||||
|
||||
For technical users looking to add new dialects or add new features
|
||||
to existing ones, the dependent nature of how dialects have been
|
||||
implemented is to try and reduce the amount of repetition in how
|
||||
different elements are defined. As an example, when we say that
|
||||
the :ref:`redshift_dialect_ref` dialect *inherits* from the
|
||||
:ref:`postgres_dialect_ref` dialect this is not because there
|
||||
is an agreement between those projects which means that features
|
||||
in one must end up in the other, but that the design of the
|
||||
:ref:`redshift_dialect_ref` dialect was heavily *inspired* by the
|
||||
postgres dialect and therefore when defining the dialect within
|
||||
sqlfluff it makes sense to use :ref:`postgres_dialect_ref` as a
|
||||
starting point rather than starting from scratch.
|
||||
|
||||
Consider when adding new features to a dialect:
|
||||
|
||||
- Should I be adding it just to this dialect, or adding it to
|
||||
a *parent* dialect?
|
||||
- If I'm creating a new dialect, which dialect would be best to
|
||||
inherit from?
|
||||
- Will the feature I'm adding break any *downstream* dependencies
|
||||
within dialects which inherit from this one?
|
||||
|
||||
.. _ansi_dialect_ref:
|
||||
|
||||
ANSI
|
||||
----
|
||||
|
||||
This is the base dialect which holds most of the definitions of common
|
||||
SQL commands and structures. If the dialect which you're actually using
|
||||
isn't specifically implemented by SQLFluff, using this dialect is a good
|
||||
place to start.
|
||||
|
||||
This dialect doesn't intend to be brutal in adhering to (and only to) the
|
||||
ANSI SQL spec *(mostly because ANSI charges for access to that spec)*. It aims
|
||||
to be a representation of vanilla SQL before any other project adds their
|
||||
spin to it, and so may contain a slightly wider set of functions than actually
|
||||
available in true ANSI SQL.
|
||||
|
||||
.. _athena_dialect_ref:
|
||||
|
||||
Athena
|
||||
--------
|
||||
|
||||
The dialect for `Amazon Athena`_.
|
||||
|
||||
.. _`Amazon Athena`: https://aws.amazon.com/athena/
|
||||
|
||||
.. _bigquery_dialect_ref:
|
||||
|
||||
BigQuery
|
||||
--------
|
||||
|
||||
The dialect for `Google BigQuery`_.
|
||||
|
||||
.. _`Google BigQuery`: https://cloud.google.com/bigquery/
|
||||
|
||||
.. _clickhouse_dialect_ref:
|
||||
|
||||
ClickHouse
|
||||
----------
|
||||
|
||||
The dialect for `ClickHouse`_.
|
||||
|
||||
.. _`ClickHouse`: https://clickhouse.com/
|
||||
|
||||
.. _databricks_dialect_ref:
|
||||
|
||||
Databricks
|
||||
----------
|
||||
|
||||
The dialect for `Databricks`_.
|
||||
|
||||
.. _`Databricks`: https://databricks.com/
|
||||
|
||||
.. _db2_dialect_ref:
|
||||
|
||||
Db2
|
||||
------
|
||||
|
||||
The dialect for `Db2`_.
|
||||
|
||||
.. _`Db2`: https://www.ibm.com/analytics/db2
|
||||
|
||||
.. _duck_dialect_ref:
|
||||
|
||||
DuckDB
|
||||
------
|
||||
|
||||
The dialect for `DuckDB`_.
|
||||
|
||||
.. _`DuckDB`: https://duckdb.org/
|
||||
|
||||
|
||||
.. _exasol_dialect_ref:
|
||||
|
||||
Exasol
|
||||
------
|
||||
|
||||
The dialect for `Exasol`_.
|
||||
|
||||
.. _`Exasol`: https://www.exasol.com/
|
||||
|
||||
.. _greenplum_dialect_ref:
|
||||
|
||||
Greenplum
|
||||
---------
|
||||
|
||||
The dialect for `Greenplum`_.
|
||||
|
||||
.. _`Greenplum`: https://www.greenplum.org/
|
||||
|
||||
.. _hive_dialect_ref:
|
||||
|
||||
Hive
|
||||
----
|
||||
|
||||
The dialect for `Hive`_.
|
||||
|
||||
.. _`Hive`: https://hive.apache.org/
|
||||
|
||||
.. _materialize_dialect_ref:
|
||||
|
||||
Materialize
|
||||
-----------
|
||||
|
||||
The dialect for `Materialize`_.
|
||||
|
||||
.. _`Materialize`: https://materialize.com/
|
||||
|
||||
.. _mysql_dialect_ref:
|
||||
|
||||
MySQL
|
||||
-----
|
||||
|
||||
The dialect for `MySQL`_.
|
||||
|
||||
.. _`MySQL`: https://www.mysql.com/
|
||||
|
||||
.. _oracle_dialect_ref:
|
||||
|
||||
Oracle
|
||||
------
|
||||
|
||||
The dialect for `Oracle`_ SQL. Note: this does not include PL/SQL.
|
||||
|
||||
.. _`Oracle`: https://www.oracle.com/database/technologies/appdev/sql.html
|
||||
|
||||
.. _postgres_dialect_ref:
|
||||
|
||||
PostgreSQL
|
||||
----------
|
||||
|
||||
This is based around the `PostgreSQL spec`_. Many other SQL instances are often
|
||||
based on PostgreSQL syntax. If you're running an unsupported dialect, then
|
||||
this is often the dialect to use (until someone makes a specific dialect).
|
||||
|
||||
.. _`PostgreSQL spec`: https://www.postgresql.org/docs/9.6/reference.html
|
||||
|
||||
.. _redshift_dialect_ref:
|
||||
|
||||
Redshift
|
||||
----------
|
||||
|
||||
|
||||
The dialect for `Amazon Redshift`_.
|
||||
|
||||
.. _`Amazon Redshift`: https://aws.amazon.com/redshift/
|
||||
|
||||
.. _snowflake_dialect_ref:
|
||||
|
||||
Snowflake
|
||||
---------
|
||||
|
||||
The dialect for `Snowflake`_, which has much of its syntax
|
||||
inherited from :ref:`postgres_dialect_ref`.
|
||||
|
||||
.. _`Snowflake`: https://docs.snowflake.com/en/sql-reference.html
|
||||
|
||||
.. _soql_dialect_ref:
|
||||
|
||||
SOQL
|
||||
----
|
||||
|
||||
The dialect for `SOQL`_ (Salesforce Object Query Language).
|
||||
|
||||
.. _`SOQL`: https://developer.salesforce.com/docs/atlas.en-us.soql_sosl.meta/soql_sosl/sforce_api_calls_soql.htm
|
||||
|
||||
.. _sparksql_dialect_ref:
|
||||
|
||||
SparkSQL
|
||||
--------
|
||||
|
||||
The dialect for Apache `Spark SQL`_. It inherits from :ref:`ansi_dialect_ref`
|
||||
and includes relevant syntax from :ref:`hive_dialect_ref` for commands that
|
||||
permit Hive Format. Spark SQL extensions provided by the `Delta Lake`_ project
|
||||
are also implemented in this dialect.
|
||||
|
||||
This implementation focuses on the `Ansi Compliant Mode`_ introduced in
|
||||
Spark3, instead of being Hive Compliant. The introduction of ANSI Compliance
|
||||
provides better data quality and easier migration from traditional DBMS.
|
||||
|
||||
Versions of Spark prior to 3.x will only support the Hive dialect.
|
||||
|
||||
.. _`Spark SQL`: https://spark.apache.org/docs/latest/sql-ref.html
|
||||
.. _`Delta Lake`: https://docs.delta.io/latest/quick-start.html#set-up-apache-spark-with-delta-lake
|
||||
.. _`Ansi Compliant Mode`: https://spark.apache.org/docs/latest/sql-ref-ansi-compliance.html
|
||||
|
||||
.. _sqlite_dialect_ref:
|
||||
|
||||
SQLite
|
||||
------
|
||||
|
||||
The dialect for `SQLite`_.
|
||||
|
||||
.. _`SQLite`: https://www.sqlite.org/
|
||||
|
||||
.. _tsql_dialect_ref:
|
||||
|
||||
T-SQL
|
||||
-----
|
||||
|
||||
The dialect for `T-SQL`_ (aka Transact-SQL).
|
||||
|
||||
.. _`T-SQL`: https://docs.microsoft.com/en-us/sql/t-sql/language-reference
|
||||
|
||||
.. _teradata_dialect_ref:
|
||||
|
||||
Teradata
|
||||
--------
|
||||
|
||||
The dialect for `Teradata`_.
|
||||
|
||||
.. _`Teradata`: https://www.teradata.co.uk/
|
||||
@@ -45,6 +45,8 @@ the best instructions for what to do next are `on the python website`_.
|
||||
.. _`instructions for all platforms here`: https://wiki.python.org/moin/BeginnersGuide/Download
|
||||
.. _`on the python website`: https://pip.pypa.io/en/stable/installation/
|
||||
|
||||
.. _installingsqlfluff:
|
||||
|
||||
Installing SQLFluff
|
||||
-------------------
|
||||
|
||||
@@ -61,7 +63,7 @@ version number.
|
||||
.. code-block:: text
|
||||
|
||||
$ sqlfluff version
|
||||
2.1.3
|
||||
3.2.5
|
||||
|
||||
Basic Usage
|
||||
-----------
|
||||
@@ -258,7 +260,7 @@ put the following content:
|
||||
[sqlfluff:indentation]
|
||||
tab_space_size = 2
|
||||
|
||||
[sqlfluff:rules:CP01]
|
||||
[sqlfluff:rules:capitalisation.keywords]
|
||||
capitalisation_policy = lower
|
||||
|
||||
Then rerun the same command as before.
|
||||
@@ -299,6 +301,8 @@ From here, there are several more things to explore.
|
||||
* To find out more about which rules are available, see :ref:`ruleref`.
|
||||
* To find out more about configuring *SQLFluff* and what other options
|
||||
are available, see :ref:`config`.
|
||||
* Once you're ready to start using *SQLFluff* on a project or with the
|
||||
rest of your team, check out :ref:`production-use`.
|
||||
|
||||
One last thing to note is that *SQLFluff* is a relatively new project
|
||||
and you may find bugs or strange things while using it. If you do find
|
||||
|
||||
131
docs/source/guides/contributing/architecture.rst
Normal file
131
docs/source/guides/contributing/architecture.rst
Normal file
@@ -0,0 +1,131 @@
|
||||
.. _architecture:
|
||||
|
||||
Architecture
|
||||
------------
|
||||
|
||||
At a high level, the behaviour of SQLFluff is divided into a few key stages.
|
||||
Whether calling `sqlfluff lint`, `sqlfluff fix` or `sqlfluff parse`, the
|
||||
internal flow is largely the same.
|
||||
|
||||
.. _templater:
|
||||
|
||||
Stage 1, the templater
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
This stage only applies to templated SQL. Vanilla SQL is sent straight to
|
||||
stage 2, the lexer.
|
||||
|
||||
In order to lint templated SQL, SQLFluff must first convert the 'raw' or
|
||||
pre-templated code into valid SQL, which can then be parsed. The templater
|
||||
returns both the raw and post-templated SQL so that any rule violations which
|
||||
occur in templated sections can be ignored and the rest mapped to their
|
||||
original line location for user feedback.
|
||||
|
||||
.. _Jinja: https://jinja.palletsprojects.com/
|
||||
.. _dbt: https://docs.getdbt.com/
|
||||
.. _`Python format strings`: https://docs.python.org/3/library/string.html#format-string-syntax
|
||||
|
||||
*SQLFluff* supports multiple templating engines:
|
||||
|
||||
* Jinja_
|
||||
* SQL placeholders (e.g. SQLAlchemy parameters)
|
||||
* `Python format strings`_
|
||||
* dbt_ (via plugin)
|
||||
|
||||
Under the hood dbt also uses Jinja, but *SQLFluff* uses a separate
|
||||
mechanism which interfaces directly with the dbt python package.
|
||||
|
||||
For more details on how to configure the templater see :ref:`templateconfig`.
|
||||
|
||||
|
||||
Stage 2, the lexer
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The lexer takes SQL and separates it into segments of whitespace and
|
||||
code. Where we can impart some high level meaning to segments, we
|
||||
do, but the result of this operation is still a flat sequence of
|
||||
typed segments (all subclasses of :code:`RawSegment`).
|
||||
|
||||
|
||||
Stage 3, the parser
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The parser is arguably the most complicated element of SQLFluff, and is
|
||||
relied on by all the other elements of the tool to do most of the heavy
|
||||
lifting.
|
||||
|
||||
#. The lexed segments are parsed using the specified dialect's grammars. In
|
||||
SQLFluff, grammars describe the shape of SQL statements (or their
|
||||
components). The parser attempts to apply each potential grammar to the
|
||||
lexed segments until all the segments have been matched.
|
||||
|
||||
#. In SQLFluff, segments form a tree-like structure. The top-level segment is
|
||||
a :code:`FileSegment`, which contains zero or more
|
||||
:code:`StatementSegment`\ s, and so on. Before the segments have been parsed
|
||||
and named according to their type, they are 'raw', meaning they have no
|
||||
classification other than their literal value.
|
||||
|
||||
#. A segment's :code:`.match()` method uses the :code:`match_grammar`, on
|
||||
which :code:`.match()` is called. SQLFluff parses in a single pass through
|
||||
the file, so segments will recursively match the file based on their
|
||||
respective grammars. In the example of a :code:`FileSegment`, it
|
||||
first divides up the query into statements, and then the :code:`.match()`
|
||||
method of those segments works out the structure within them.
|
||||
|
||||
* *Segments* must implement a :code:`match_grammar`. When :code:`.match()`
|
||||
is called on a segment, this is the grammar which is used to decide
|
||||
whether there is a match.
|
||||
|
||||
* *Grammars* combine *segments* or other *grammars* together in a
|
||||
pre-defined way. For example the :code:`OneOf` grammar will match if any
|
||||
one of its child elements match.
|
||||
|
||||
#. During the recursion, the parser eventually reaches segments which have
|
||||
no children (raw segments containing a single token), and so the
|
||||
recursion naturally finishes.
|
||||
|
||||
#. If no match is found for a segment, the contents will be wrapped in an
|
||||
:code:`UnparsableSegment` which is picked up as a *parsing* error later.
|
||||
This is usually facilitated by the :code:`ParseMode` on some grammars
|
||||
which can be set to :code:`GREEDY`, allowing the grammar to capture
|
||||
additional segments as unparsable. As an example, bracketed sections
|
||||
are often configured to capture anything unexpected as unparsable rather
|
||||
than simply failing to match if there is more than expected (which would
|
||||
be the default, :code:`STRICT`, behaviour).
|
||||
|
||||
#. The result of the :code:`.match()` method is a :code:`MatchResult` which
|
||||
contains the instructions on how to turn the flat sequence of raw segments
|
||||
into a nested tree of segments. Calling :code:`.apply()` on this result
|
||||
at the end of the matching process is what finally creates the nested
|
||||
structure.
|
||||
|
||||
When working on the parser there are a couple of design principles
|
||||
to keep in mind.
|
||||
|
||||
- Grammars are contained in *dialects*, the root dialect being
|
||||
the *ansi* dialect. The ansi dialect is used to host logic common
|
||||
to all dialects, and so does not necessarily adhere to the formal
|
||||
ansi specification. Other SQL dialects inherit from the ansi dialect,
|
||||
replacing or patching any segments they need to. One reason for the *Ref*
|
||||
grammar is that it allows name resolution of grammar elements at runtime
|
||||
and so a *patched* grammar with some elements overridden can still rely on
|
||||
lower-level elements which haven't been redeclared within the dialect
|
||||
- All grammars and segments attempt to match as much as they can and will
|
||||
return partial matches where possible. It is up to the calling grammar
|
||||
or segment to decide whether a partial or complete match is required
|
||||
based on the context it is matching in.
|
||||
|
||||
|
||||
Stage 4, the linter
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Given the complete parse tree, rule classes check for linting errors by
|
||||
traversing the tree, looking for segments and patterns of concern. If
|
||||
the rule discovers a violation, it returns a
|
||||
:py:class:`~sqlfluff.core.rules.base.LintResult` pointing to the segment
|
||||
which caused the violation.
|
||||
|
||||
Some rules are able to *fix* the problems they find. If this is the case,
|
||||
the rule will return a list of fixes, which describe changes to be made to
|
||||
the tree. This can include edits, inserts, or deletions. Once the fixes
|
||||
have been applied, the updated tree is written to the original file.
|
||||
1129
docs/source/guides/contributing/dialect.rst
Normal file
1129
docs/source/guides/contributing/dialect.rst
Normal file
File diff suppressed because it is too large
Load Diff
121
docs/source/guides/contributing/docs.rst
Normal file
121
docs/source/guides/contributing/docs.rst
Normal file
@@ -0,0 +1,121 @@
|
||||
Documentation Contributions
|
||||
===========================
|
||||
|
||||
Contributing to the docs is one of the easiest and most helpful ways
|
||||
to help the project. Documentation changes require relatively little
|
||||
specialist knowledge apart from being familiar with how to use SQLFluff
|
||||
and the docs are read by a very wide range of people.
|
||||
|
||||
Documentation takes two forms:
|
||||
|
||||
1. Embedded documentation found in function and module `docstrings`_.
|
||||
|
||||
2. The free-standing documentation which you're reading now, and hosted
|
||||
at `docs.sqlfluff.com`_ (built using `sphinx`_ and `ReadtheDocs`_).
|
||||
|
||||
The two are somewhat blurred by the use of `autodoc`_ (and some other custom
|
||||
integrations), where documentation is generated directly off `docstrings`_
|
||||
within the codebase, for example the :ref:`ruleref`, :ref:`cliref` and
|
||||
:ref:`dialectref`. To understand more about how the custom integrations
|
||||
we use to generate these docs, see the `generate-auto-docs.py`_ file.
|
||||
|
||||
.. _`docstrings`: https://en.wikipedia.org/wiki/Docstring
|
||||
.. _`docs.sqlfluff.com`: https://docs.sqlfluff.com
|
||||
.. _`autodoc`: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
|
||||
.. _`generate-auto-docs.py`: https://github.com/sqlfluff/sqlfluff/blob/main/docs/generate-auto-docs.py
|
||||
|
||||
.. _`ReadtheDocs`: https://about.readthedocs.com/
|
||||
|
||||
Docstrings
|
||||
----------
|
||||
|
||||
Embedded documentation of functions, classes and modules is most useful
|
||||
for *developer-focussed* documentation as it's most accessible in the places
|
||||
which those developers are working: *directly in the codebase*. We enforce
|
||||
that docstrings are present and correctly formatted using the
|
||||
`pydocstyle rules for ruff`_, which we have configured to enforce the
|
||||
`google style of docstrings`_.
|
||||
|
||||
.. _`pydocstyle rules for ruff`: https://docs.astral.sh/ruff/rules/#pydocstyle-d
|
||||
.. _`google style of docstrings`: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html
|
||||
|
||||
Sphinx Docs
|
||||
-----------
|
||||
|
||||
The main documentation (which you're reading now), is built using `sphinx`_,
|
||||
and written using `reStructuredText`_ (files ending with :code:`.rst`). The
|
||||
`sphinx`_ project offers a `reStructuredText primer`_ for people who are new
|
||||
to the syntax (and the SQLFluff project uses `doc8`_ in the CI process to try
|
||||
and catch any issues early).
|
||||
|
||||
On top of those docs, there are a few areas worth highlighting for new (or
|
||||
returning) users, which are either specific to the SQLFluff project, or not
|
||||
particularly clear in the sphinx docs:
|
||||
|
||||
* `reStructuredText`_ is very similar to, but differs from (the somewhat more
|
||||
well known) `Markdown`_ syntax. Importantly:
|
||||
|
||||
* :code:`*text with single asterisks*` renders as *italics*. Use
|
||||
:code:`**double asterisks**` for **bold text**.
|
||||
|
||||
* :code:`code snippets` are created using the |codesnippet|
|
||||
directive, rather than just lone backticks (|backquotes|) as found in
|
||||
most `Markdown`_.
|
||||
|
||||
* To create links to other parts of the documentation (i.e.
|
||||
`Cross-referencing`_), use the :code:`:ref:` syntax.
|
||||
|
||||
* Docs for all the SQL dialects are auto generated with associated anchors
|
||||
to use for referencing. For example to link to the
|
||||
:ref:`postgres_dialect_ref` dialect docs, you can use the |postgresref|.
|
||||
Replace the :code:`postgres` portion with the :code:`name` of the
|
||||
dialect you want to link to.
|
||||
|
||||
* Docs for all the bundled rules are handled using a custom `sphinx`_
|
||||
plugin, which means you can refer to them using their name or code:
|
||||
|LT01ref| resolves to :sqlfluff:ref:`LT01` and |layoutspacingref|
|
||||
resolves to :sqlfluff:ref:`layout.spacing`.
|
||||
|
||||
* Docs for any of the python classes and modules handled using `autodoc`_
|
||||
can be referenced as per their docs, so the
|
||||
:py:class:`sqlfluff.core.rules.base.BaseRule` class can be referenced
|
||||
with |baseruleref|. You can also use the :code:`~` prefix (i.e.
|
||||
|shortbaseruleref|) so that it just renders as
|
||||
:py:class:`~sqlfluff.core.rules.base.BaseRule`. See the docs for
|
||||
`Cross-referencing`_ for more details.
|
||||
|
||||
.. _`sphinx`: https://www.sphinx-doc.org/en/master/
|
||||
.. _`reStructuredText`: https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html
|
||||
.. _`reStructuredText primer`: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
|
||||
.. _`doc8`: https://github.com/PyCQA/doc8
|
||||
.. _`Markdown`: https://www.markdownguide.org/
|
||||
.. _`Cross-referencing`: https://www.sphinx-doc.org/en/master/usage/referencing.html
|
||||
.. _`autodoc`: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
|
||||
|
||||
.. |codesnippet| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">:code:`...`</code>
|
||||
|
||||
.. |backquotes| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">`...`</code>
|
||||
|
||||
.. |postgresref| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">:ref:`postgres_dialect_ref`</code>
|
||||
|
||||
.. |LT01ref| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">:sqlfluff:ref:`LT01`</code>
|
||||
|
||||
.. |layoutspacingref| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">:sqlfluff:ref:`layout.spacing`</code>
|
||||
|
||||
.. |baseruleref| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">:py:class:`sqlfluff.core.rules.base.BaseRule`</code>
|
||||
|
||||
.. |shortbaseruleref| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">:py:class:`~sqlfluff.core.rules.base.BaseRule`</code>
|
||||
1024
docs/source/guides/contributing/git.rst
Normal file
1024
docs/source/guides/contributing/git.rst
Normal file
File diff suppressed because it is too large
Load Diff
BIN
docs/source/guides/contributing/github_clone_button.png
Normal file
BIN
docs/source/guides/contributing/github_clone_button.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 98 KiB |
BIN
docs/source/guides/contributing/github_example_merge_panel.png
Normal file
BIN
docs/source/guides/contributing/github_example_merge_panel.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 198 KiB |
BIN
docs/source/guides/contributing/github_example_pr.png
Normal file
BIN
docs/source/guides/contributing/github_example_pr.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 144 KiB |
BIN
docs/source/guides/contributing/github_fork_status.png
Normal file
BIN
docs/source/guides/contributing/github_fork_status.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 13 KiB |
BIN
docs/source/guides/contributing/github_reactions.png
Normal file
BIN
docs/source/guides/contributing/github_reactions.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 6.3 KiB |
@@ -8,6 +8,37 @@ to make linting Rules pluggable, which enable users to implement rules that
|
||||
are just too "organization specific" to be shared, or too platform specific
|
||||
to be included in the core library.
|
||||
|
||||
.. note::
|
||||
|
||||
We recommend that the module in a plugin which defines all
|
||||
of the hook implementations (anything using the ``@hookimpl`` decorator)
|
||||
must be able to fully import before any rule implementations are imported.
|
||||
More specifically, SQLFluff must be able to both *import* **and**
|
||||
*run* any implementations of ``get_configs_info()`` before any plugin
|
||||
rules (i.e. any derivatives of :py:class:`~sqlfluff.core.rules.base.BaseRule`)
|
||||
are *imported*. Because of this, we recommend that rules are defined in a
|
||||
separate module to the root of the plugin and then only imported within
|
||||
the ``get_rules()`` method.
|
||||
|
||||
Importing in the main body of the module was previously our recommendation
|
||||
and so may be the case for versions of some plugins. If one of your plugins
|
||||
does use imports in this way, a warning will be presented, recommending that
|
||||
you update your plugin.
|
||||
|
||||
.. code-block:: python
|
||||
:emphasize-lines: 7,8
|
||||
|
||||
# The root module will need to import `hookimpl`, but
|
||||
# should not yet import the rule definitions for the plugin.
|
||||
from sqlfluff.core.plugin import hookimpl
|
||||
|
||||
@hookimpl
|
||||
def get_rules():
|
||||
# Rules should be imported within the `get_rules` method instead
|
||||
from my_plugin.rules import MyRule
|
||||
return [MyRule]
|
||||
|
||||
|
||||
.. _`pluggy library`: https://pluggy.readthedocs.io/en/latest/
|
||||
|
||||
Creating a plugin
|
||||
@@ -9,6 +9,36 @@ SQLFluff crawls through the parse tree of a SQL file, calling the rule's
|
||||
the rule code to be really streamlined and only contain the logic for the rule
|
||||
itself, with all the other mechanics abstracted away.
|
||||
|
||||
Running Tests
|
||||
-------------
|
||||
|
||||
The majority of the test cases for most bundled rules are *"yaml test cases"*,
|
||||
i.e. test cases defined in `yaml`_ files. You can find those `yaml fixtures on github`_.
|
||||
While this provides a very simple way to *write* tests, it can be occasionally tedious
|
||||
to *run* specific tests.
|
||||
|
||||
Within either a `tox` environment or `virtualenv` (as described in the `contributing.md`_
|
||||
file), you can either run all of the rule yaml tests with:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pytest test/rules/yaml_test_cases_test.py -vv
|
||||
|
||||
...or to just run tests for a specific rule, there are two options for a syntax to select
|
||||
only those tests:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
pytest -vv test/rules/ -k RF01
|
||||
|
||||
The :code:`-k` option simply searches for the content of the argument being in the name
|
||||
of the test, which will match any single or combo tests for that rule. By convention,
|
||||
any test cases for a rule should include the code for that rule.
|
||||
|
||||
.. _`yaml`: https://yaml.org/
|
||||
.. _`yaml fixtures on github`: https://github.com/sqlfluff/sqlfluff/tree/main/test/fixtures/rules/std_rule_cases
|
||||
.. _`contributing.md`: https://github.com/sqlfluff/sqlfluff/blob/main/CONTRIBUTING.md
|
||||
|
||||
Traversal Options
|
||||
-----------------
|
||||
|
||||
@@ -67,45 +97,3 @@ times.
|
||||
NOTE: ``post`` rules also run on the *first* pass of the ``main`` phase so that
|
||||
any issues they find will be presented in the list of issues output by
|
||||
``sqlfluff fix`` and ``sqlfluff lint``.
|
||||
|
||||
Base Rules
|
||||
----------
|
||||
|
||||
`base_rules` Module
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.core.rules.base
|
||||
:members:
|
||||
|
||||
Functional API
|
||||
--------------
|
||||
These newer modules provide a higher-level API for rules working with segments
|
||||
and slices. Rules that need to navigate or search the parse tree may benefit
|
||||
from using these. Eventually, the plan is for **all** rules to use these
|
||||
modules. As of December 30, 2021, 17+ rules use these modules.
|
||||
|
||||
The modules listed below are submodules of `sqlfluff.utils.functional`.
|
||||
|
||||
`segments` Module
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.segments
|
||||
:members:
|
||||
|
||||
`segment_predicates` Module
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.segment_predicates
|
||||
:members:
|
||||
|
||||
`raw_file_slices` Module
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.raw_file_slices
|
||||
:members:
|
||||
|
||||
`raw_file_slice_predicates` Module
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.raw_file_slice_predicates
|
||||
:members:
|
||||
47
docs/source/guides/index.rst
Normal file
47
docs/source/guides/index.rst
Normal file
@@ -0,0 +1,47 @@
|
||||
.. _guides:
|
||||
|
||||
Guides & How-tos
|
||||
================
|
||||
|
||||
This section of short guides and articles is meant to be read alongside
|
||||
the rest of the documentation which is more reference-oriented.
|
||||
|
||||
Setting up SQLFluff
|
||||
-------------------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
setup/teamrollout
|
||||
setup/developing_custom_rules
|
||||
|
||||
Troubleshooting SQLFluff
|
||||
------------------------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
troubleshooting/how_to
|
||||
|
||||
.. _development:
|
||||
|
||||
Contributing to SQLFluff
|
||||
------------------------
|
||||
|
||||
It is recommended that the following is read in conjunction with exploring
|
||||
the codebase. `dialect_ansi.py`_ in particular is helpful to understand the
|
||||
recursive structure of segments and grammars.
|
||||
|
||||
You may also need to reference the :ref:`internal_api_docs`.
|
||||
|
||||
.. _`dialect_ansi.py`: https://github.com/sqlfluff/sqlfluff/blob/main/src/sqlfluff/dialects/dialect_ansi.py
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
contributing/architecture
|
||||
contributing/git
|
||||
contributing/dialect
|
||||
contributing/rules
|
||||
contributing/plugins
|
||||
contributing/docs
|
||||
134
docs/source/guides/setup/developing_custom_rules.rst
Normal file
134
docs/source/guides/setup/developing_custom_rules.rst
Normal file
@@ -0,0 +1,134 @@
|
||||
.. _developing_custom_rules:
|
||||
|
||||
Developing Custom Rules
|
||||
=======================
|
||||
|
||||
It's quite common to have organisation-, or project-specific norms and
|
||||
conventions you might want to enforce using SQLFluff. With a little bit
|
||||
of python knowledge this is very achievable with SQLFluff, and there's
|
||||
a plugin architecture to support that.
|
||||
|
||||
This guide should be read alongside the code for the
|
||||
`SQLFluff example plugin`_ and the more technical documentation for
|
||||
:ref:`developingpluginsref`.
|
||||
|
||||
What Plugin do I need?
|
||||
----------------------
|
||||
|
||||
When thinking about developing a rule, the following thought process will
|
||||
help you decide what to develop:
|
||||
|
||||
1. When do I want this rule to show a warning, when should it definitely
|
||||
**not** show one? What information do I need when evaluating whether
|
||||
the rule has been followed or not? This information will tell you
|
||||
about the two important *locations* in the parse tree which will become
|
||||
important.
|
||||
|
||||
* The *trigger* location: i.e. when should the rule be *called* for
|
||||
evaluation. e.g. :sqlfluff:ref:`CP01` triggers on keywords, because
|
||||
it only needs the information about that keyword to run, but
|
||||
:sqlfluff:ref:`LT08` triggers on ``WITH`` statements even though it's
|
||||
only interested in specific pieces of whitespace, because it needs the
|
||||
full context of the statement to evaluate. You may wish to examine the
|
||||
parse structure of some example queries you'd want to handle by using
|
||||
``sqlfluff parse my_file.sql`` to identify the right segment. This is
|
||||
then specified using the ``crawl_behaviour`` attribute on the rule.
|
||||
|
||||
* The *anchor* location: i.e. which position will show up in the CLI
|
||||
readout back to the user. To continue the example of above, while
|
||||
:sqlfluff:ref:`LT08` *triggers* on a ``WITH`` statement, it *anchors*
|
||||
on a more specific segment just after where it expected whitespace.
|
||||
It specifies this using the ``anchor`` argument to the
|
||||
:py:class:`~sqlfluff.core.rules.base.LintResult` object.
|
||||
|
||||
2. How should the rule evaluate and should I implement an auto-fix? For
|
||||
the simplest rules, the logic to evaluate whether there's an issue
|
||||
can be *very simple*. For example in the `SQLFluff example plugin`_,
|
||||
we are just checking the name of an element isn't in a configured list.
|
||||
Typically we recommend that for organisation-specific rules, **KEEP IT**
|
||||
**SIMPLE**. Some of the rules bundled with SQLFluff contain a lot of
|
||||
complexity for handling how to automatically fix lots of edge cases,
|
||||
but for your organisation it's probably not worth the overhead unless
|
||||
you're a **very big team** or **come across a huge amount of poorly**
|
||||
**formatted SQL**.
|
||||
|
||||
* Consider the information not just to *trigger*, but also whether a
|
||||
custom error message would be appropriate and how to get the information
|
||||
to construct that too. The default error message will be the first
|
||||
line of the rule docstring_. Custom messages can be configured by
|
||||
setting the ``description`` argument of the
|
||||
:py:class:`~sqlfluff.core.rules.base.LintResult` object.
|
||||
|
||||
* Do use the existing SQLFluff core rules as examples of what is possible
|
||||
and how to achieve various things - but remember that many of them
|
||||
implement a level of complexity and edge case handling which may not
|
||||
be necessary for your organisation.
|
||||
|
||||
3. How am I going to roll out my rule to the team? Thinking through this
|
||||
aspect of rule development is just as important as the technical aspect.
|
||||
Spending a lot of time on rule development for it to be rejected by
|
||||
the end users of it is both a waste of time and also counterproductive.
|
||||
|
||||
* Consider manually fixing any pre-existing issues in your project which
|
||||
would trigger the rule before rollout.
|
||||
|
||||
* Seek consensus on how strictly the rule will be enforced and what the
|
||||
step by step pathway is to strict enforcement.
|
||||
|
||||
* Consider *beta-testing* your new rule with a smaller group of users
|
||||
who are more engaged with SQLFluff or code quality in general.
|
||||
|
||||
.. _docstring: https://en.wikipedia.org/wiki/Docstring
|
||||
|
||||
Plugin Discovery
|
||||
----------------
|
||||
|
||||
One of the most common questions asked with respect to custom plugins is
|
||||
*discovery*, or *"how do I tell SQLFluff where my plugin is"*. SQLFluff
|
||||
uses pluggy_ as its plugin architecture (developed by the folks at pytest_).
|
||||
Pluggy uses the python packaging metadata for plugin discovery. This means
|
||||
that **your plugin must be installed as a python package for discovery**.
|
||||
Specifically, it must define an `entry point`_ for SQLFluff.
|
||||
When SQLFluff runs, it inspects installed python packages for this entry
|
||||
point and then can run any which define one. For example you'll see in the
|
||||
`SQLFluff example plugin`_ that the ``pyproject.toml`` file has the
|
||||
following section:
|
||||
|
||||
.. code-block:: toml
|
||||
|
||||
[project.entry-points.sqlfluff]
|
||||
# Change this name in your plugin, e.g. company name or plugin purpose.
|
||||
sqlfluff_example = "sqlfluff_plugin_example"
|
||||
|
||||
You can find equivalent examples for ``setup.cfg`` and ``setup.py`` in the
|
||||
python docs for `entry point`_. This information is registered
|
||||
*on install* of your plugin, (i.e. when running `pip install`, or equivalent
|
||||
if you're using a different package manager) so if you change it later, you
|
||||
may need to re-install your plugin.
|
||||
|
||||
You can test whether your rule has been successfully discovered by running
|
||||
``sqlfluff rules`` and reviewing whether your new rule has been included in
|
||||
the readout.
|
||||
|
||||
.. note::
|
||||
If you're struggling with rule discovery, **use the example plugin**.
|
||||
It can be much easier to take a known working example and then modify
|
||||
from there:
|
||||
|
||||
1. Copy the code from the `SQLFluff example plugin`_ into a local
|
||||
folder.
|
||||
|
||||
2. Run `pip install -e /path/to/where/you/put/it`.
|
||||
|
||||
3. Run `sqlfluff rules`, to confirm that the example plugin is present
|
||||
to demonstrate to yourself that discovery is functional.
|
||||
|
||||
4. Then edit the example plugin to do what you want now that discovery
|
||||
isn't an issue. You may have to re-run `pip install ...` if you
|
||||
change anything in the rule metadata (like the entry point, filenames
|
||||
or plugin location).
|
||||
|
||||
.. _pluggy: https://pluggy.readthedocs.io/en/latest/
|
||||
.. _pytest: https://docs.pytest.org/en/stable/
|
||||
.. _`entry point`: https://setuptools.pypa.io/en/stable/userguide/entry_point.html
|
||||
.. _`SQLFluff example plugin`: https://github.com/sqlfluff/sqlfluff/tree/main/plugins/sqlfluff-plugin-example
|
||||
144
docs/source/guides/troubleshooting/how_to.rst
Normal file
144
docs/source/guides/troubleshooting/how_to.rst
Normal file
@@ -0,0 +1,144 @@
|
||||
How to Troubleshoot SQLFluff
|
||||
============================
|
||||
|
||||
It can at times be tricky to troubleshoot SQLFluff as it exists within
|
||||
an ecosystem of other tools, and can be deployed in wide range of ways.
|
||||
|
||||
This step by step guide can help you narrow down what's likely going wrong
|
||||
and point you toward the swiftest resolution.
|
||||
|
||||
1. Common Errors
|
||||
----------------
|
||||
|
||||
There are a few error messages you may get which have relatively
|
||||
straightforward resolution paths.
|
||||
|
||||
Parsing Errors
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
SQLFluff needs to be able to parse your SQL to understand its structure.
|
||||
That means if it fails to parse the SQL it will give you an error message.
|
||||
The intent is that if SQLFluff cannot parse the SQL, then it should mean
|
||||
the SQL is also invalid and help you understand where and why.
|
||||
|
||||
For example, this is a simple query which is not valid SQL:
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
select 1 2 3
|
||||
from my_table
|
||||
|
||||
When running ``sqlfluff lint`` or ``sqlfluff parse`` we get the following
|
||||
error message::
|
||||
|
||||
==== parsing violations ====
|
||||
L: 1 | P: 10 | PRS | Line 1, Position 10: Found unparsable section: '2 3'
|
||||
|
||||
Furthermore if we look at the full parsing output we can see an unparsable
|
||||
section in the parse tree:
|
||||
|
||||
.. code-block::
|
||||
:emphasize-lines: 12,13,14,15
|
||||
|
||||
[L: 1, P: 1] |file:
|
||||
[L: 1, P: 1] | statement:
|
||||
[L: 1, P: 1] | select_statement:
|
||||
[L: 1, P: 1] | select_clause:
|
||||
[L: 1, P: 1] | keyword: 'select'
|
||||
[L: 1, P: 7] | [META] indent:
|
||||
[L: 1, P: 7] | whitespace: ' '
|
||||
[L: 1, P: 8] | select_clause_element:
|
||||
[L: 1, P: 8] | numeric_literal: '1'
|
||||
[L: 1, P: 9] | [META] dedent:
|
||||
[L: 1, P: 9] | whitespace: ' '
|
||||
[L: 1, P: 10] | unparsable: !! Expected: 'Nothing here.'
|
||||
[L: 1, P: 10] | numeric_literal: '2'
|
||||
[L: 1, P: 11] | whitespace: ' '
|
||||
[L: 1, P: 12] | numeric_literal: '3'
|
||||
[L: 1, P: 13] | newline: '\n'
|
||||
[L: 2, P: 1] | from_clause:
|
||||
[L: 2, P: 1] | keyword: 'from'
|
||||
[L: 2, P: 5] | whitespace: ' '
|
||||
[L: 2, P: 6] | from_expression:
|
||||
[L: 2, P: 6] | [META] indent:
|
||||
[L: 2, P: 6] | from_expression_element:
|
||||
[L: 2, P: 6] | table_expression:
|
||||
[L: 2, P: 6] | table_reference:
|
||||
[L: 2, P: 6] | naked_identifier: 'my_table'
|
||||
[L: 2, P: 14] | [META] dedent:
|
||||
[L: 2, P: 14] | newline: '\n'
|
||||
[L: 3, P: 1] | [META] end_of_file:
|
||||
|
||||
SQLFluff maintains its own version of each SQL dialect, and this may not be
|
||||
exhaustive for some of the dialects which are newer to SQLFluff or which are
|
||||
in very active development themselves. This means in some scenarios you may
|
||||
find a query which runs fine in your environment, but cannot be parsed by
|
||||
SQLFluff. This is not a *"bug"* per-se, but is an indicator of a gap in the
|
||||
SQLFluff dialect.
|
||||
|
||||
Many of the issues raised on GitHub relate to parsing errors like this, but
|
||||
it's also a great way to support the project if you feel able to contribute
|
||||
a dialect improvement yourself. We have a short guide on
|
||||
:ref:`contributing_dialect_changes` to walk you through the process. In the
|
||||
short term you can also ignore specific files from your overall project so
|
||||
that this specific file doesn't become a blocker for the rest.
|
||||
See :ref:`ignoreconfig`.
|
||||
|
||||
Configuration Issues
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you're getting either unexpected behaviour with your config, or errors
|
||||
because config values haven't been set correctly, it's often due to config
|
||||
file discovery (i.e. whether SQLFluff can find your config file, and what
|
||||
order it's combining config files).
|
||||
|
||||
For a more general guide to this topic see :ref:`setting_config`.
|
||||
|
||||
To help troubleshoot issues, if you run ``sqlfluff`` with a more verbose
|
||||
logging setting (e.g. ``sqlfluff lint /my/model.sql -v``, or ``-vv``, or
|
||||
``-vvvvvv``) you'll get a readout of the root config that SQLFluff is using.
|
||||
This can help debug which values are being used.
|
||||
|
||||
2. Isolating SQLFluff
|
||||
---------------------
|
||||
|
||||
If you're still getting strange errors, then the next most useful thing you
|
||||
can do, both to help narrow down the cause, but also to assist with fixing
|
||||
a bug if you have found one, is to isolate SQLFluff from any other tools
|
||||
you're using in parallel:
|
||||
|
||||
1. If you're using SQLFluff with the :ref:`dbt_templater`, then try and
|
||||
recreate the error with the :ref:`jinja_templater` to remove the influence
|
||||
of ``dbt`` and any database connection related issues.
|
||||
|
||||
2. If you're getting an error in a remote CI suite (for example on GitHub
|
||||
actions, or a server like Jenkins), try and recreate the issue locally
|
||||
on your machine using the same tools.
|
||||
|
||||
3. If you're :ref:`using-pre-commit`, :ref:`diff-quality` or the
|
||||
`VSCode extension`_ try to recreate the issue by running the SQLFluff
|
||||
:ref:`cliref` directly. Often this can make debugging significantly
|
||||
easier because some of these tools hide some of the error messages
|
||||
which SQLFluff gives the user to help debugging errors.
|
||||
|
||||
.. _`VSCode extension`: https://github.com/sqlfluff/vscode-sqlfluff
|
||||
|
||||
3. Minimise the Query
|
||||
---------------------
|
||||
|
||||
Often SQL scripts can get very long, and if you're getting an error on a very
|
||||
long script, then it can be extremely difficult to work out what the issue is.
|
||||
To assist with this we recommend iteratively cutting down the file (or
|
||||
alternatively, iteratively building a file back up) until you have the smallest
|
||||
file which still exhibits the issue. Often after this step, the issue can
|
||||
become obvious.
|
||||
|
||||
1. If your file has multiple statements in it (i.e. statements separated
|
||||
by ``;``), then remove ones until SQLFluff no longer shows the issue. When
|
||||
you get to that point, add the offending one back in and remove all the
|
||||
others.
|
||||
|
||||
2. Simplify individual statements. For example in a ``SELECT`` statement, if
|
||||
you suspect the issue is coming from a particular column, then remove the
|
||||
others, or remove CTEs, until you've got the simplest query which still
|
||||
shows the issue.
|
||||
@@ -14,6 +14,10 @@ Notable releases:
|
||||
:code:`sqlfluff format` and removal of support for dbt versions pre `1.1`.
|
||||
Note, that this release brings with it some breaking changes to rule coding
|
||||
and configuration, see :ref:`upgrading_2_0`.
|
||||
* **3.0.x**: :code:`sqlfluff fix` now defaults to *not* asking for confirmation
|
||||
and the `--force` option was removed. Richer information returned by the
|
||||
:code:`sqlfluff lint` command (although in a different structure to previous
|
||||
versions). See :ref:`upgrading_3_0`.
|
||||
|
||||
For more detail on other releases, see our :ref:`releasenotes`.
|
||||
|
||||
@@ -57,20 +61,11 @@ Contents
|
||||
:caption: Documentation for SQLFluff:
|
||||
|
||||
gettingstarted
|
||||
realworld
|
||||
vision
|
||||
teamrollout
|
||||
layout
|
||||
rules
|
||||
dialects
|
||||
production
|
||||
configuration
|
||||
cli
|
||||
api
|
||||
releasenotes
|
||||
internals
|
||||
developingrules
|
||||
developingplugins
|
||||
why_sqlfluff
|
||||
guides/index
|
||||
configuration/index
|
||||
production/index
|
||||
reference/index
|
||||
inthewild
|
||||
jointhecommunity
|
||||
|
||||
|
||||
@@ -1,182 +0,0 @@
|
||||
Internals
|
||||
=========
|
||||
|
||||
It is recommended that the following is read in conjunction with exploring
|
||||
the codebase. `dialect_ansi.py` in particular is helpful to understand the
|
||||
recursive structure of segments and grammars. Some more detail is also given
|
||||
on our Wiki_ including a `Contributing Dialect Changes`_ guide.
|
||||
|
||||
.. _Wiki: https://github.com/sqlfluff/sqlfluff/wiki/
|
||||
.. _`Contributing Dialect Changes`: https://github.com/sqlfluff/sqlfluff/wiki/Contributing-Dialect-Changes
|
||||
|
||||
|
||||
Architecture
|
||||
------------
|
||||
|
||||
At a high level, the behaviour of SQLFluff is divided into a few key stages.
|
||||
Whether calling `sqlfluff lint`, `sqlfluff fix` or `sqlfluff parse`, the
|
||||
internal flow is largely the same.
|
||||
|
||||
.. _templater:
|
||||
|
||||
Stage 1, the templater
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
This stage only applies to templated SQL, most commonly Jinja and dbt. Vanilla
|
||||
SQL is sent straight to stage 2, the lexer.
|
||||
|
||||
In order to lint templated SQL, SQLFluff must first convert the 'raw' or
|
||||
pre-templated code into valid SQL, which can then be parsed. The templater
|
||||
returns both the raw and post-templated SQL so that any rule violations which
|
||||
occur in templated sections can be ignored and the rest mapped to their
|
||||
original line location for user feedback.
|
||||
|
||||
.. _Jinja: https://jinja.palletsprojects.com/
|
||||
.. _dbt: https://docs.getdbt.com/
|
||||
|
||||
*SQLFluff* supports two templating engines: Jinja_ and dbt_.
|
||||
|
||||
Under the hood dbt also uses Jinja, but *SQLFluff* uses a separate
|
||||
mechanism which interfaces directly with the dbt python package.
|
||||
|
||||
For more details on how to configure the templater see :ref:`templateconfig`.
|
||||
|
||||
|
||||
Stage 2, the lexer
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The lexer takes SQL and separates it into segments of whitespace and
|
||||
code. No meaning is imparted; that is the job of the parser.
|
||||
|
||||
|
||||
Stage 3, the parser
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The parser is arguably the most complicated element of SQLFluff, and is
|
||||
relied on by all the other elements of the tool to do most of the heavy
|
||||
lifting.
|
||||
|
||||
#. The lexed segments are parsed using the specified dialect's grammars. In
|
||||
SQLFluff, grammars describe the shape of SQL statements (or their
|
||||
components). The parser attempts to apply each potential grammar to the
|
||||
lexed segments until all the segments have been matched.
|
||||
|
||||
#. In SQLFluff, segments form a tree-like structure. The top-level segment is
|
||||
a :code:`FileSegment`, which contains zero or more
|
||||
:code:`StatementSegment`\ s, and so on. Before the segments have been parsed
|
||||
and named according to their type, they are 'raw', meaning they have no
|
||||
classification other than their literal value.
|
||||
|
||||
#. The three key components to the parser are segments,
|
||||
:code:`match_grammar`\ s and :code:`parse_grammar`\ s. A segment can be a
|
||||
leaf in the parse tree, such as a :code:`NumericLiteralSegment`, which is
|
||||
simply a number, or can contain many other segments, such as a
|
||||
:code:`SelectStatementSegment`. Each segment can specify a
|
||||
:code:`parse_grammar`, and a :code:`match_grammar`. If both a
|
||||
:code:`match_grammar` and :code:`parse_grammar` are defined in a segment,
|
||||
:code:`match_grammar` is used to quickly prune the tree for branches which
|
||||
do not match segments being parsed, and the :code:`parse_grammar` is then
|
||||
used to refine the branch identified as correct. If only a
|
||||
:code:`match_grammar` is defined, then it serves the purpose of both pruning
|
||||
and refining.
|
||||
|
||||
#. A segment's :code:`.parse()` method uses the :code:`parse_grammar`, on
|
||||
which :code:`.match()` is called. The *match* method of this grammar will
|
||||
return a potentially refined structure of the segments within the
|
||||
segment in greater detail. In the example of a :code:`FileSegment`, it
|
||||
first divides up the query into statements and then finishes.
|
||||
|
||||
* *Segments* must implement a :code:`match_grammar`. When :code:`.match()`
|
||||
is called on a segment, this is the grammar which is used to decide
|
||||
whether there is a match.
|
||||
|
||||
* *Grammars* combine *segments* or other *grammars* together in a
|
||||
pre-defined way. For example the :code:`OneOf` grammar will match if any
|
||||
one of its child elements match.
|
||||
|
||||
#. Regardless of whether the :code:`parse_grammar` was used, the next step
|
||||
is to recursively call the :code:`.parse()` method of each of the child
|
||||
segments of the grammar. This operation is wrapped in a method called
|
||||
:code:`.expand()`. In the :code:`FileSegment`, the first step will have
|
||||
transformed a series of raw tokens into :code:`StatementSegment`
|
||||
segments, and the *expand* step will let each of those segments refine
|
||||
the content within them.
|
||||
|
||||
#. During the recursion, the parser eventually reaches segments which have
|
||||
no children (raw segments containing a single token), and so the
|
||||
recursion naturally finishes.
|
||||
|
||||
#. If no match is found for a segment, the contents will be wrapped in an
|
||||
:code:`UnparsableSegment` which is picked up as a *parsing* error later.
|
||||
|
||||
When working on the parser there are a couple of design principles
|
||||
to keep in mind.
|
||||
|
||||
- Grammars are contained in *dialects*, the root dialect being
|
||||
the *ansi* dialect. The ansi dialect is used to host logic common
|
||||
to all dialects, and so does not necessarily adhere to the formal
|
||||
ansi specification. Other SQL dialects inherit from the ansi dialect,
|
||||
replacing or patching any segments they need to. One reason for the *Ref*
|
||||
grammar is that it allows name resolution of grammar elements at runtime
|
||||
and so a *patched* grammar with some elements overridden can still rely on
|
||||
lower-level elements which haven't been redeclared within the dialect
|
||||
- All grammars and segments attempt to match as much as they can and will
|
||||
return partial matches where possible. It is up to the calling grammar
|
||||
or segment to decide whether a partial or complete match is required
|
||||
based on the context it is matching in.
|
||||
|
||||
|
||||
Stage 4, the linter
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Given the complete parse tree, rule classes check for linting errors by
|
||||
traversing the tree, looking for segments and patterns of concern. If
|
||||
the rule discovers a violation, it returns a :code:`LintResult` pointing
|
||||
to the segment which caused the violation.
|
||||
|
||||
Some rules are able to *fix* the problems they find. If this is the case,
|
||||
the rule will return a list of fixes, which describe changes to be made to
|
||||
the tree. This can include edits, inserts, or deletions. Once the fixes
|
||||
have been applied, the updated tree is written to the original file.
|
||||
|
||||
|
||||
.. _reflowinternals:
|
||||
|
||||
Reflow Internals
|
||||
----------------
|
||||
|
||||
Many rules supported by SQLFluff involve the spacing and layout of different
|
||||
elements, either to enforce a particular layout or just to add or remove
|
||||
code elements in a way sensitive to the existing layout configuration. The
|
||||
way this is achieved is through some centralised utilities in the
|
||||
`sqlfluff.utils.reflow` module.
|
||||
|
||||
This module aims to achieve several things:
|
||||
* Less code duplication by implementing reflow logic in only one place.
|
||||
|
||||
* Provide a streamlined interface for rules to easily utilise reflow logic.
|
||||
|
||||
* Given this requirement, it's important that reflow utilities work
|
||||
within the existing framework for applying fixes to potentially
|
||||
templated code. We achieve this by returning `LintFix` objects which
|
||||
can then be returned by each rule wanting to use this logic.
|
||||
|
||||
* Provide a consistent way of *configuring* layout requirements. For more
|
||||
details on configuration see :ref:`layoutconfig`.
|
||||
|
||||
To support this, the module provides a :code:`ReflowSequence` class which
|
||||
allows access to all of the relevant operations which can be used to
|
||||
reformat sections of code, or even a whole file. Unless there is a very
|
||||
good reason, all rules should use this same approach to ensure consistent
|
||||
treatment of layout.
|
||||
|
||||
.. autoclass:: sqlfluff.utils.reflow.ReflowSequence
|
||||
:members:
|
||||
|
||||
.. autoclass:: sqlfluff.utils.reflow.elements.ReflowPoint
|
||||
:members:
|
||||
:inherited-members:
|
||||
|
||||
.. autoclass:: sqlfluff.utils.reflow.elements.ReflowBlock
|
||||
:members:
|
||||
:inherited-members:
|
||||
@@ -1,9 +1,9 @@
|
||||
.. _jointhecommunity:
|
||||
.. _sqlfluff_slack:
|
||||
|
||||
SQLFluff Slack
|
||||
====================
|
||||
==============
|
||||
|
||||
We have a fast-growing `community on Slack <https://join.slack.com/t/sqlfluff/shared_invite/zt-o1f4x0e8-pZzarAIlQmKj_6ZwD16w0g>`_, come and join us!
|
||||
We have a fast-growing `community on Slack <https://join.slack.com/t/sqlfluff/shared_invite/zt-2qtu36kdt-OS4iONPbQ3aCz2DIbYJdWg>`_, come and join us!
|
||||
|
||||
|
||||
SQLFluff on Twitter
|
||||
|
||||
2
docs/source/partials/.gitignore
vendored
2
docs/source/partials/.gitignore
vendored
@@ -1,2 +0,0 @@
|
||||
rule_table.rst
|
||||
rule_summaries.rst
|
||||
@@ -1,242 +0,0 @@
|
||||
.. _production-use:
|
||||
|
||||
Production Usage & Security
|
||||
===========================
|
||||
|
||||
SQLFluff is designed to be used both as a utility for developers but also to
|
||||
be part of `CI/CD`_ pipelines.
|
||||
|
||||
.. _security:
|
||||
|
||||
Security Considerations
|
||||
-----------------------
|
||||
|
||||
A full list of `Security Advisories is available on GitHub <https://github.com/sqlfluff/sqlfluff/security/advisories>`_.
|
||||
|
||||
Given the context of how SQLFluff is designed to be used, there are three
|
||||
different tiers of access which users may have access to manipulate how the
|
||||
tool functions in a secure environment.
|
||||
|
||||
#. *Users may have edit access to the SQL code which is being linted*. While
|
||||
SQLFluff does not execute the SQL itself, in the process of the
|
||||
:ref:`templating step <templater>` (in particular via jinja or dbt),
|
||||
certain macros may have the ability to execute arbitrary SQL code (e.g.
|
||||
the `dbt run_query macro`_). For the Jinja templater, SQLFluff uses the
|
||||
`Jinja2 SandboxedEnvironment`_ to limit the execution on unsafe code. When
|
||||
looking to further secure this situation, see below for ways to limit the
|
||||
ability of users to import other libraries.
|
||||
|
||||
#. *Users may have edit access to the SQLFluff :ref:`config-files`*. In some
|
||||
(perhaps, many) environments, the users who can edit SQL files may also
|
||||
be able to access and edit the :ref:`config-files`. It's important to note
|
||||
that because of :ref:`in_file_config`, that users who can edit SQL files
|
||||
which are designed to be linted, will also have access to the vast majority
|
||||
of any configuration options available in :ref:`config-files`. This means
|
||||
that there is minimal additional protection from restricting access to
|
||||
:ref:`config-files` for users who already have access to edit the linting
|
||||
target files (as described above).
|
||||
|
||||
#. *Users may have access to change how SQLFluff is invoked*. SQLFluff can
|
||||
be invoked either as a command line tool or via the python API. Typically
|
||||
the method is fixed for a given application. When thinking about how to
|
||||
restrict the ability of users to call insecure code, SQLFluff aims to
|
||||
provide options at the point of invocation. In particular, as described
|
||||
above, the primary risk vector for SQLFluff is the macro environment
|
||||
as described in :ref:`templateconfig`. To restrict users being able to
|
||||
bring arbitrary python methods into sqlfluff via the ``library_path``
|
||||
configuration value (see :ref:`jinja_library_templating`), we recommend
|
||||
that for secure environments you override this config value either by
|
||||
providing an ``override`` option to the :class:`FluffConfig` object if
|
||||
using the Python API or via the ``--library-path`` CLI option:
|
||||
|
||||
To disable this option entirely via the CLI:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ sqlfluff lint my_path --library-path none
|
||||
|
||||
To disable this option entirely via the python API:
|
||||
|
||||
.. literalinclude:: ../../examples/04_config_overrides.py
|
||||
:language: python
|
||||
|
||||
.. _`Jinja2 SandboxedEnvironment`: https://jinja.palletsprojects.com/en/3.0.x/sandbox/#jinja2.sandbox.SandboxedEnvironment
|
||||
.. _`dbt run_query macro`: https://docs.getdbt.com/reference/dbt-jinja-functions/run_query
|
||||
|
||||
Using SQLFluff on a whole sql codebase
|
||||
--------------------------------------
|
||||
|
||||
The `exit code`_ provided by SQLFluff when run as a command line utility is
|
||||
designed to assist usefulness in deployment pipelines. If no violations
|
||||
are found then the `exit code`_ will be 0. If violations are found then
|
||||
a non-zero code will be returned which can be interrogated to find out
|
||||
more.
|
||||
|
||||
- At the moment all error states related to linting return *65*.
|
||||
- An error as a result of a SQLFluff internal error will return *1*.
|
||||
|
||||
.. _`CI/CD`: https://en.wikipedia.org/wiki/Continuous_integration
|
||||
.. _`exit code`: https://shapeshed.com/unix-exit-codes/
|
||||
|
||||
.. _diff-quality:
|
||||
|
||||
Using SQLFluff on changes using ``diff-quality``
|
||||
------------------------------------------------
|
||||
|
||||
For projects with large amounts of (potentially imperfect) SQL code, the full
|
||||
SQLFluff output could be very large, which can be distracting -- perhaps the CI
|
||||
build for a one-line SQL change shouldn't encourage the developer to fix lots
|
||||
of unrelated quality issues.
|
||||
|
||||
To support this use case, SQLFluff integrates with a quality checking tool
|
||||
called ``diff-quality``. By running SQLFluff using ``diff-quality`` (rather
|
||||
than running it directly), you can limit the output to the new or modified
|
||||
SQL in the branch (aka pull request or PR) containing the proposed changes.
|
||||
|
||||
Currently, ``diff-quality`` requires that you are using ``git`` for version
|
||||
control.
|
||||
|
||||
NOTE: Installing SQLFluff automatically installs the ``diff_cover`` package
|
||||
that provides the ``diff-quality`` tool.
|
||||
|
||||
Adding ``diff-quality`` to your builds
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In your CI build script:
|
||||
|
||||
1. Set the current working directory to the ``git`` repository containing the
|
||||
SQL code to be checked.
|
||||
|
||||
2. Run ``diff-quality``, specifying SQLFluff as the underlying tool:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ diff-quality --violations sqlfluff
|
||||
|
||||
The output will look something like:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
-------------
|
||||
Diff Quality
|
||||
Quality Report: sqlfluff
|
||||
Diff: origin/master...HEAD, staged and unstaged changes
|
||||
-------------
|
||||
sql/audience_size_queries/constraints/_postcondition_check_gdpr_compliance.sql (0.0%):
|
||||
sql/audience_size_queries/constraints/_postcondition_check_gdpr_compliance.sql:5: Unquoted Identifiers must be consistently upper case.
|
||||
-------------
|
||||
Total: 1 line
|
||||
Violations: 1 line
|
||||
% Quality: 0%
|
||||
-------------
|
||||
|
||||
These messages are basically the same as those provided directly by SQLFluff,
|
||||
although the format is a little different. Note that ``diff-quality`` only lists
|
||||
the line *numbers*, not the character position. If you need the character
|
||||
position, you will need to run SQLFluff directly.
|
||||
|
||||
For more information on ``diff-quality``, see the
|
||||
`documentation <https://diff-cover.readthedocs.io/en/latest/>`_. It covers topics
|
||||
such as:
|
||||
|
||||
* Generating HTML reports
|
||||
* Controlling which branch to compare against (i.e. to determine new/changed
|
||||
lines). The default is `origin/master`.
|
||||
* Configuring ``diff-quality`` to return an error code if the quality is
|
||||
too low.
|
||||
* Troubleshooting
|
||||
|
||||
.. _using-pre-commit:
|
||||
|
||||
Using `pre-commit`_
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
`pre-commit`_ is a framework to manage git "hooks"
|
||||
triggered right before a commit is made.
|
||||
|
||||
A `git hook`_ is a git feature to "fire off custom scripts"
|
||||
when specific actions occur.
|
||||
|
||||
Using `pre-commit`_ with SQLFluff is a good way
|
||||
to provide automated linting to SQL developers.
|
||||
|
||||
With `pre-commit`_, you also get the benefit of
|
||||
only linting/fixing the files that changed.
|
||||
|
||||
SQLFluff comes with two `pre-commit`_ hooks:
|
||||
|
||||
* sqlfluff-lint: returns linting errors.
|
||||
* sqlfluff-fix: attempts to fix rule violations.
|
||||
|
||||
.. warning::
|
||||
For safety reasons, ``sqlfluff-fix`` by default will not make any fixes in
|
||||
files that had templating or parse errors, even if those errors were ignored
|
||||
using ``noqa`` or ``--ignore``.
|
||||
|
||||
Although it is not advised, you *can* tell SQLFluff to try and fix
|
||||
these files by overriding the ``fix_even_unparsable`` setting
|
||||
in ``.sqlfluff`` config file or using the ``sqlfluff fix --FIX-EVEN-UNPARSABLE``
|
||||
command line option.
|
||||
|
||||
*Overriding this behavior may break your SQL. If you use this override,
|
||||
always be sure to review any fixes applied to files with templating or parse
|
||||
errors to verify they are okay.*
|
||||
|
||||
You should create a file named `.pre-commit-config.yaml`
|
||||
at the root of your git project, which should look
|
||||
like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/sqlfluff/sqlfluff
|
||||
rev: |release|
|
||||
hooks:
|
||||
- id: sqlfluff-lint
|
||||
# For dbt projects, this installs the dbt "extras".
|
||||
# You will need to select the relevant dbt adapter for your dialect
|
||||
# (https://docs.getdbt.com/docs/available-adapters):
|
||||
# additional_dependencies: ['<dbt-adapter>', 'sqlfluff-templater-dbt']
|
||||
- id: sqlfluff-fix
|
||||
# Arbitrary arguments to show an example
|
||||
# args: [--rules, "LT02,CP02"]
|
||||
# additional_dependencies: ['<dbt-adapter>', 'sqlfluff-templater-dbt']
|
||||
|
||||
When trying to use the `dbt templater`_, uncomment the
|
||||
``additional_dependencies`` to install the extras.
|
||||
This is equivalent to running ``pip install <dbt-adapter> sqlfluff-templater-dbt``.
|
||||
|
||||
You can specify the version of ``dbt-adapter`` used in `pre-commit`_,
|
||||
for example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
additional_dependencies : ['dbt-bigquery==1.0.0', 'sqlfluff-templater-dbt']
|
||||
|
||||
See the list of available `dbt-adapters`_.
|
||||
|
||||
Note that you can pass the same arguments available
|
||||
through the CLI using ``args:``.
|
||||
|
||||
Using `GitHub Actions`_ to Annotate PRs
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
There are two ways to utilize SQLFluff to annotate Github PRs.
|
||||
|
||||
1. When `sqlfluff lint` is run with the `--format github-annotation-native`
|
||||
option, it produces output formatted as `Github workflow commands`_ which
|
||||
are converted into pull request annotations by Github.
|
||||
|
||||
2. When `sqlfluff lint` is run with the `--format github-annotation` option, it
|
||||
produces output compatible with this `action <https://github.com/yuzutech/annotations-action>`_.
|
||||
Which uses Github API to annotate the SQL in `GitHub pull requests`.
|
||||
|
||||
For more information and examples on using SQLFluff in GitHub Actions, see the
|
||||
`sqlfluff-github-actions repository <https://github.com/sqlfluff/sqlfluff-github-actions>`_.
|
||||
|
||||
.. _`pre-commit`: https://pre-commit.com/
|
||||
.. _`git hook`: https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
|
||||
.. _`dbt templater`: `dbt-project-configuration`
|
||||
.. _`GitHub Actions`: https://github.com/features/actions
|
||||
.. _`GitHub pull requests`: https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests
|
||||
.. _`Github workflow commands`: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-notice-message
|
||||
.. _`dbt-adapters`: https://docs.getdbt.com/docs/available-adapters
|
||||
25
docs/source/production/cli_use.rst
Normal file
25
docs/source/production/cli_use.rst
Normal file
@@ -0,0 +1,25 @@
|
||||
Using SQLFluff directly as a CLI application
|
||||
--------------------------------------------
|
||||
|
||||
The :ref:`SQLFluff CLI application <cliref>` is a python application which
|
||||
means it depends on your host python environment
|
||||
(see :ref:`installingsqlfluff`).
|
||||
|
||||
The `exit code`_ provided by SQLFluff when run as a command line utility is
|
||||
designed to assist usefulness in deployment pipelines. If no violations
|
||||
are found then the `exit code`_ will be 0. If violations are found then
|
||||
a non-zero code will be returned which can be interrogated to find out
|
||||
more.
|
||||
|
||||
- An error code of ``0`` means *operation success*, *no issues found*.
|
||||
- An error code of ``1`` means *operation success*, *issues found*. For
|
||||
example this might mean that a linting issue was found, or that one file
|
||||
could not be parsed.
|
||||
- An error code of ``2`` means an error occurred and the operation could
|
||||
not be completed. For example a configuration issue or an internal error
|
||||
within SQLFluff.
|
||||
|
||||
For details of what commands and options are available in the CLI see the
|
||||
:ref:`cliref`.
|
||||
|
||||
.. _`exit code`: https://shapeshed.com/unix-exit-codes/
|
||||
98
docs/source/production/diff_quality.rst
Normal file
98
docs/source/production/diff_quality.rst
Normal file
@@ -0,0 +1,98 @@
|
||||
.. _diff-quality:
|
||||
|
||||
Using SQLFluff on changes using ``diff-quality``
|
||||
================================================
|
||||
|
||||
For projects with large amounts of (potentially imperfect) SQL code, the full
|
||||
SQLFluff output could be very large, which can be distracting -- perhaps the CI
|
||||
build for a one-line SQL change shouldn't encourage the developer to fix lots
|
||||
of unrelated quality issues.
|
||||
|
||||
To support this use case, SQLFluff integrates with a quality checking tool
|
||||
called ``diff-quality``. By running SQLFluff using ``diff-quality`` (rather
|
||||
than running it directly), you can limit the output to the new or modified
|
||||
SQL in the branch (aka pull request or PR) containing the proposed changes.
|
||||
|
||||
Currently, ``diff-quality`` requires that you are using ``git`` for version
|
||||
control.
|
||||
|
||||
NOTE: Installing SQLFluff automatically installs the ``diff_cover`` package
|
||||
that provides the ``diff-quality`` tool.
|
||||
|
||||
Adding ``diff-quality`` to your builds
|
||||
--------------------------------------
|
||||
|
||||
In your CI build script:
|
||||
|
||||
1. Set the current working directory to the ``git`` repository containing the
|
||||
SQL code to be checked.
|
||||
|
||||
2. Run ``diff-quality``, specifying SQLFluff as the underlying tool:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ diff-quality --violations sqlfluff
|
||||
|
||||
The output will look something like:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
-------------
|
||||
Diff Quality
|
||||
Quality Report: sqlfluff
|
||||
Diff: origin/master...HEAD, staged and unstaged changes
|
||||
-------------
|
||||
sql/audience_size_queries/constraints/_postcondition_check_gdpr_compliance.sql (0.0%):
|
||||
sql/audience_size_queries/constraints/_postcondition_check_gdpr_compliance.sql:5: Unquoted Identifiers must be consistently upper case.
|
||||
-------------
|
||||
Total: 1 line
|
||||
Violations: 1 line
|
||||
% Quality: 0%
|
||||
-------------
|
||||
|
||||
These messages are basically the same as those provided directly by SQLFluff,
|
||||
although the format is a little different. Note that ``diff-quality`` only lists
|
||||
the line *numbers*, not the character position. If you need the character
|
||||
position, you will need to run SQLFluff directly.
|
||||
|
||||
.. note::
|
||||
When using ``diff-quality`` with ``.sqlfluff`` :ref:`config-files`, and
|
||||
especially together with the :ref:`dbt_templater`, it can be really easy
|
||||
to run into issues with file discovery. There are a few steps you can
|
||||
take to make it much less likely that this will happen:
|
||||
|
||||
1. ``diff-quality`` needs to be run from the root of your ``git``
|
||||
repository (so that it can find the ``git`` metadata).
|
||||
|
||||
2. SQLFluff works best if the bulk of the configuration is done from a
|
||||
single ``.sqlfluff`` file, which should be in the root of your
|
||||
``git`` repository.
|
||||
|
||||
3. If using :ref:`dbt_templater`, then either place your ``dbt_project.yml``
|
||||
file in the same root folder, or if you put it in a subfolder, then
|
||||
only invoke ``diff-quality`` and ``sqlfluff`` from the root and define
|
||||
the subfolder that the ``dbt`` project lives in using the ``.sqlfluff``
|
||||
config file.
|
||||
|
||||
By aligning the paths of all three, you should be able to achieve a
|
||||
robust setup. If each is rooted in different paths it can be very
|
||||
difficult to achieve the same result, and the resulting behaviour
|
||||
can be difficult to debug.
|
||||
|
||||
To debug any issues relating to this setup, we recommend occasionally
|
||||
running ``sqlfluff`` directly using the main cli (i.e. calling
|
||||
:code:`sqlfluff lint my/project/path`) and check whether that route
|
||||
gives you the results you expect. ``diff-quality`` should behave as
|
||||
though it's calling the SQLFluff CLI *from the same path that you*
|
||||
*invoke* ``diff-quality``.
|
||||
|
||||
For more information on ``diff-quality`` and the ``diff_cover`` package, see the
|
||||
`documentation <https://github.com/Bachmann1234/diff_cover>`_ on their github
|
||||
repository. It covers topics such as:
|
||||
|
||||
* Generating HTML reports
|
||||
* Controlling which branch to compare against (i.e. to determine new/changed
|
||||
lines). The default is ``origin/main``.
|
||||
* Configuring ``diff-quality`` to return an error code if the quality is
|
||||
too low.
|
||||
* Troubleshooting
|
||||
30
docs/source/production/github_actions.rst
Normal file
30
docs/source/production/github_actions.rst
Normal file
@@ -0,0 +1,30 @@
|
||||
Using `GitHub Actions`_ to Annotate PRs
|
||||
=======================================
|
||||
|
||||
There are two ways to utilize SQLFluff to annotate Github PRs.
|
||||
|
||||
1. When :code:`sqlfluff lint` is run with the :code:`--format github-annotation-native`
|
||||
option, it produces output formatted as `Github workflow commands`_ which
|
||||
are converted into pull request annotations by Github.
|
||||
|
||||
2. When :code:`sqlfluff lint` is run with the :code:`--format github-annotation`
|
||||
option, it produces output compatible with this `action from yuzutech`_.
|
||||
Which uses Github API to annotate the SQL in `GitHub pull requests`.
|
||||
|
||||
.. warning::
|
||||
At present (December 2023), limitations put in place by Github mean that only the
|
||||
first 10 annotations will be displayed if the first option (using
|
||||
:code:`github-annotation-native`) is used. This is not something that SQLFluff
|
||||
can control itself and so we currently recommend using the second option
|
||||
above and the `action from yuzutech`_.
|
||||
|
||||
There is an `open feature request <https://github.com/orgs/community/discussions/68471>`_
|
||||
for GitHub Actions which you can track to follow this issue.
|
||||
|
||||
For more information and examples on using SQLFluff in GitHub Actions, see the
|
||||
`sqlfluff-github-actions repository <https://github.com/sqlfluff/sqlfluff-github-actions>`_.
|
||||
|
||||
.. _`GitHub Actions`: https://github.com/features/actions
|
||||
.. _`GitHub pull requests`: https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests
|
||||
.. _`Github workflow commands`: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-notice-message
|
||||
.. _`action from yuzutech`: https://github.com/yuzutech/annotations-action
|
||||
18
docs/source/production/index.rst
Normal file
18
docs/source/production/index.rst
Normal file
@@ -0,0 +1,18 @@
|
||||
.. _production-use:
|
||||
|
||||
Production Usage & Security
|
||||
===========================
|
||||
|
||||
SQLFluff is designed to be used both as a utility for developers but also to
|
||||
be part of `CI/CD`_ pipelines.
|
||||
|
||||
.. _`CI/CD`: https://en.wikipedia.org/wiki/Continuous_integration
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
security
|
||||
cli_use
|
||||
diff_quality
|
||||
pre_commit
|
||||
github_actions
|
||||
99
docs/source/production/pre_commit.rst
Normal file
99
docs/source/production/pre_commit.rst
Normal file
@@ -0,0 +1,99 @@
|
||||
.. _using-pre-commit:
|
||||
|
||||
Using :code:`pre-commit`
|
||||
========================
|
||||
|
||||
`pre-commit`_ is a framework to manage git "hooks"
|
||||
triggered right before a commit is made.
|
||||
|
||||
A `git hook`_ is a git feature to "fire off custom scripts"
|
||||
when specific actions occur.
|
||||
|
||||
Using `pre-commit`_ with SQLFluff is a good way
|
||||
to provide automated linting to SQL developers.
|
||||
|
||||
With `pre-commit`_, you also get the benefit of
|
||||
only linting/fixing the files that changed.
|
||||
|
||||
SQLFluff comes with two `pre-commit`_ hooks:
|
||||
|
||||
* ``sqlfluff-lint``: returns linting errors.
|
||||
* ``sqlfluff-fix``: attempts to fix rule violations.
|
||||
|
||||
.. warning::
|
||||
For safety reasons, ``sqlfluff-fix`` by default will not make any fixes in
|
||||
files that had templating or parse errors, even if those errors were ignored
|
||||
using ``noqa`` or ``--ignore``.
|
||||
|
||||
Although it is not advised, you *can* tell SQLFluff to try and fix
|
||||
these files by overriding the ``fix_even_unparsable`` setting
|
||||
in ``.sqlfluff`` config file or using the ``sqlfluff fix --FIX-EVEN-UNPARSABLE``
|
||||
command line option.
|
||||
|
||||
*Overriding this behavior may break your SQL. If you use this override,
|
||||
always be sure to review any fixes applied to files with templating or parse
|
||||
errors to verify they are okay.*
|
||||
|
||||
You should create a file named ``.pre-commit-config.yaml``
|
||||
at the root of your git project, which should look
|
||||
like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/sqlfluff/sqlfluff
|
||||
rev: |release|
|
||||
hooks:
|
||||
- id: sqlfluff-lint
|
||||
# For dbt projects, this installs the dbt "extras".
|
||||
# You will need to select the relevant dbt adapter for your dialect
|
||||
# (https://docs.getdbt.com/docs/available-adapters):
|
||||
# additional_dependencies: ['<dbt-adapter>', 'sqlfluff-templater-dbt']
|
||||
- id: sqlfluff-fix
|
||||
# Arbitrary arguments to show an example
|
||||
# args: [--rules, "LT02,CP02"]
|
||||
# additional_dependencies: ['<dbt-adapter>', 'sqlfluff-templater-dbt']
|
||||
|
||||
When trying to use the `dbt templater`_, uncomment the
|
||||
``additional_dependencies`` to install the extras.
|
||||
This is equivalent to running ``pip install <dbt-adapter> sqlfluff-templater-dbt``.
|
||||
|
||||
You can specify the version of ``dbt-adapter`` used in `pre-commit`_,
|
||||
for example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
additional_dependencies : ['dbt-bigquery==1.0.0', 'sqlfluff-templater-dbt']
|
||||
|
||||
See the list of available `dbt-adapters`_.
|
||||
|
||||
Note that you can pass the same arguments available
|
||||
through the CLI using ``args:``.
|
||||
|
||||
.. _`pre-commit`: https://pre-commit.com/
|
||||
.. _`git hook`: https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
|
||||
.. _`dbt templater`: `dbt-project-configuration`
|
||||
.. _`dbt-adapters`: https://docs.getdbt.com/docs/available-adapters
|
||||
|
||||
Ignoring files while using :code:`pre-commit`
|
||||
---------------------------------------------
|
||||
|
||||
Under the hood, `pre-commit`_ works by passing specific files to *SQLFluff*.
|
||||
For example, if the only two files that are modified in your commit are
|
||||
:code:`file_a.sql` and :code:`file_b.sql`, then the command which is called
|
||||
in the background is :code:`sqlfluff lint file_a.sql file_b.sql`. While this
|
||||
is efficient, it does produce some unwanted noise when also
|
||||
using :ref:`sqlfluffignore`. This is because *SQLFluff* is designed to allow
|
||||
users to override an *ignore* configuration by passing the name of the file
|
||||
directly. This makes a lot of sense in a CLI context, but less so in the context
|
||||
of being invoked by `pre-commit`_.
|
||||
|
||||
To avoid noisy logs when using both `pre-commit`_ and :ref:`sqlfluffignore`,
|
||||
we recommend also setting the :code:`exclude` argument in your
|
||||
:code:`.pre-commit-config.yaml` file (either the `top level config`_ or the
|
||||
`hook specific config`_). This will prevent files matching the given pattern
|
||||
being passed to *SQLFluff* and so silence any warnings about the
|
||||
:ref:`sqlfluffignore` being overridden.
|
||||
|
||||
.. _`top level config`: https://pre-commit.com/#top_level-exclude
|
||||
.. _`hook specific config`: https://pre-commit.com/#config-exclude
|
||||
56
docs/source/production/security.rst
Normal file
56
docs/source/production/security.rst
Normal file
@@ -0,0 +1,56 @@
|
||||
.. _security:
|
||||
|
||||
Security Considerations
|
||||
=======================
|
||||
|
||||
A full list of `Security Advisories is available on GitHub <https://github.com/sqlfluff/sqlfluff/security/advisories>`_.
|
||||
|
||||
Given the context of how SQLFluff is designed to be used, there are three
|
||||
different tiers of access which users may have access to manipulate how the
|
||||
tool functions in a secure environment.
|
||||
|
||||
#. *Users may have edit access to the SQL code which is being linted*. While
|
||||
SQLFluff does not execute the SQL itself, in the process of the
|
||||
:ref:`templating step <templater>` (in particular via jinja or dbt),
|
||||
certain macros may have the ability to execute arbitrary SQL code (e.g.
|
||||
the `dbt run_query macro`_). For the Jinja templater, SQLFluff uses the
|
||||
`Jinja2 SandboxedEnvironment`_ to limit the execution of unsafe code. When
|
||||
looking to further secure this situation, see below for ways to limit the
|
||||
ability of users to import other libraries.
|
||||
|
||||
#. *Users may have edit access to the SQLFluff :ref:`config-files`*. In some
|
||||
(perhaps, many) environments, the users who can edit SQL files may also
|
||||
be able to access and edit the :ref:`config-files`. It's important to note
|
||||
that because of :ref:`in_file_config`, that users who can edit SQL files
|
||||
which are designed to be linted, will also have access to the vast majority
|
||||
of any configuration options available in :ref:`config-files`. This means
|
||||
that there is minimal additional protection from restricting access to
|
||||
:ref:`config-files` for users who already have access to edit the linting
|
||||
target files (as described above).
|
||||
|
||||
#. *Users may have access to change how SQLFluff is invoked*. SQLFluff can
|
||||
be invoked either as a command line tool or via the Python API. Typically
|
||||
the method is fixed for a given application. When thinking about how to
|
||||
restrict the ability of users to call insecure code, SQLFluff aims to
|
||||
provide options at the point of invocation. In particular, as described
|
||||
above, the primary risk vector for SQLFluff is the macro environment
|
||||
as described in :ref:`templateconfig`. To restrict users being able to
|
||||
bring arbitrary python methods into sqlfluff via the ``library_path``
|
||||
configuration value (see :ref:`jinja_library_templating`), we recommend
|
||||
that for secure environments you override this config value either by
|
||||
providing an ``override`` option to the :class:`FluffConfig` object if
|
||||
using the Python API or via the ``--library-path`` CLI option:
|
||||
|
||||
To disable this option entirely via the CLI:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ sqlfluff lint my_path --library-path none
|
||||
|
||||
To disable this option entirely via the python API:
|
||||
|
||||
.. literalinclude:: ../../../examples/04_config_overrides.py
|
||||
:language: python
|
||||
|
||||
.. _`Jinja2 SandboxedEnvironment`: https://jinja.palletsprojects.com/en/3.0.x/sandbox/#jinja2.sandbox.SandboxedEnvironment
|
||||
.. _`dbt run_query macro`: https://docs.getdbt.com/reference/dbt-jinja-functions/run_query
|
||||
@@ -1,14 +1,14 @@
|
||||
.. _apiref:
|
||||
|
||||
API Reference
|
||||
=============
|
||||
Python API
|
||||
==========
|
||||
|
||||
SQLFluff exposes a public api for other python applications to use.
|
||||
A basic example of this usage is given here, with the documentation
|
||||
for each of the methods below.
|
||||
|
||||
|
||||
.. literalinclude:: ../../examples/01_basic_api_usage.py
|
||||
.. literalinclude:: ../../../examples/01_basic_api_usage.py
|
||||
:language: python
|
||||
|
||||
|
||||
@@ -32,5 +32,34 @@ future release. If you come to rely on the internals of SQLFluff, please
|
||||
post an issue on GitHub to share what you're up to. This will help shape
|
||||
a more reliable, tidy and well documented public API for use.
|
||||
|
||||
Configuring SQLFluff
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You can use :code:`FluffConfig()` class to configure SQLFluff behaviour.
|
||||
|
||||
|
||||
.. literalinclude:: ../../../examples/04_config_overrides.py
|
||||
:language: python
|
||||
|
||||
|
||||
Instances of :code:`FluffConfig()` can be created manually, or parsed.
|
||||
|
||||
|
||||
.. literalinclude:: ../../../examples/05_simple_api_config.py
|
||||
:language: python
|
||||
|
||||
|
||||
Supported dialects and rules are available through :code:`list_dialects()`
|
||||
and :code:`list_rules()`.
|
||||
|
||||
|
||||
.. literalinclude:: ../../../examples/03_getting_rules_and_dialects.py
|
||||
:language: python
|
||||
|
||||
|
||||
Advanced API reference
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
.. automodule:: sqlfluff.core
|
||||
:members: Linter, Lexer, Parser, FluffConfig
|
||||
:members: Linter, Lexer, Parser
|
||||
48
docs/source/reference/dialects.rst
Normal file
48
docs/source/reference/dialects.rst
Normal file
@@ -0,0 +1,48 @@
|
||||
.. _dialectref:
|
||||
|
||||
Dialects Reference
|
||||
==================
|
||||
|
||||
SQLFluff is designed to be flexible in supporting a variety of dialects.
|
||||
Not all potential dialects are supported so far, but several have been
|
||||
implemented by the community. Below are a list of the currently available
|
||||
dialects. Each inherits from another, up to the root `ansi` dialect.
|
||||
|
||||
For a canonical list of supported dialects, run the
|
||||
:program:`sqlfluff dialects` command, which will output a list of the
|
||||
current dialects available on your installation of SQLFluff.
|
||||
|
||||
.. note::
|
||||
|
||||
For technical users looking to add new dialects or add new features
|
||||
to existing ones, the dependent nature of how dialects have been
|
||||
implemented is to try and reduce the amount of repetition in how
|
||||
different elements are defined. As an example, when we say that
|
||||
the :ref:`redshift_dialect_ref` dialect *inherits* from the
|
||||
:ref:`postgres_dialect_ref` dialect this is not because there
|
||||
is an agreement between those projects which means that features
|
||||
in one must end up in the other, but that the design of the
|
||||
:ref:`redshift_dialect_ref` dialect was heavily *inspired* by the
|
||||
postgres dialect and therefore when defining the dialect within
|
||||
sqlfluff it makes sense to use :ref:`postgres_dialect_ref` as a
|
||||
starting point rather than starting from scratch.
|
||||
|
||||
Consider when adding new features to a dialect:
|
||||
|
||||
- Should I be adding it just to this dialect, or adding it to
|
||||
a *parent* dialect?
|
||||
- If I'm creating a new dialect, which dialect would be best to
|
||||
inherit from?
|
||||
- Will the feature I'm adding break any *downstream* dependencies
|
||||
within dialects which inherit from this one?
|
||||
|
||||
.. We define a shortcut to render double backticks here, which can
|
||||
then be referenced by individual dialects when they want to say
|
||||
how backtick quotes behave in that dialect. They would otherwise
|
||||
be interpreted as markup and so not shown as back quotes.
|
||||
|
||||
.. |back_quotes| raw:: html
|
||||
|
||||
<code class="code docutils literal notranslate">``</code>
|
||||
|
||||
.. include:: ../_partials/dialect_summaries.rst
|
||||
14
docs/source/reference/index.rst
Normal file
14
docs/source/reference/index.rst
Normal file
@@ -0,0 +1,14 @@
|
||||
.. _reference:
|
||||
|
||||
Reference
|
||||
=========
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
dialects
|
||||
rules
|
||||
cli
|
||||
api
|
||||
internals/index
|
||||
releasenotes
|
||||
70
docs/source/reference/internals/config.rst
Normal file
70
docs/source/reference/internals/config.rst
Normal file
@@ -0,0 +1,70 @@
|
||||
.. _fluffconfig:
|
||||
|
||||
:code:`sqlfluff.core.config`: Configuration & ``FluffConfig``
|
||||
-------------------------------------------------------------
|
||||
|
||||
When using the Python API, there are additional options for configuration
|
||||
beyond those specified in the :ref:`setting_config` section of the main
|
||||
docs. Internally, SQLFluff uses a consistent
|
||||
:py:class:`~sqlfluff.core.config.fluffconfig.FluffConfig` class which is
|
||||
then made accessible to different parts of the tool during linting and fixing.
|
||||
|
||||
As described in the :ref:`nesting` section of the configuration docs, multiple
|
||||
nested configuration files can be used in a project and the result is a
|
||||
combined config object which contains the resulting union of those files.
|
||||
Under the hood, this is stored in a dict object, and it's possible to get and
|
||||
set individual values, using
|
||||
:py:meth:`~sqlfluff.core.config.fluffconfig.FluffConfig.get`
|
||||
& :py:meth:`~sqlfluff.core.config.fluffconfig.FluffConfig.set_value`, but
|
||||
also get entire portions of that config dict using
|
||||
:py:meth:`~sqlfluff.core.config.fluffconfig.FluffConfig.get_section`.
|
||||
|
||||
Methods for creating config mappings
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When instantiating a :py:class:`~sqlfluff.core.config.fluffconfig.FluffConfig`
|
||||
object, there are a few options to set specific config values (such as
|
||||
``dialect`` or ``rules``), but to access the full available set of features
|
||||
it's best to pass in a :obj:`dict` of the values you want to set.
|
||||
|
||||
This config :obj:`dict` is a nested object, where the colon (`:`) characters
|
||||
from the ``.sqlfluff`` config files, delimit the keys. For example, take the
|
||||
following config file:
|
||||
|
||||
.. code-block:: cfg
|
||||
|
||||
[sqlfluff:rules:capitalisation.keywords]
|
||||
capitalisation_policy = lower
|
||||
|
||||
This would be represented in the config dict as below. See that the nested
|
||||
structure has been created by splitting the keys on the colon (`:`) characters:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
configs = {
|
||||
"rules":{
|
||||
"capitalisation.keywords": {
|
||||
"capitalisation_policy": "lower"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
The following methods are provided to allow conversion of a selection of file
|
||||
formats into a consistent mapping object for instantiating a
|
||||
:py:class:`~sqlfluff.core.config.fluffconfig.FluffConfig` object.
|
||||
|
||||
.. autofunction:: sqlfluff.core.config.loader.load_config_string
|
||||
|
||||
.. autofunction:: sqlfluff.core.config.loader.load_config_file
|
||||
|
||||
.. autofunction:: sqlfluff.core.config.loader.load_config_resource
|
||||
|
||||
.. autofunction:: sqlfluff.core.config.loader.load_config_at_path
|
||||
|
||||
.. autofunction:: sqlfluff.core.config.loader.load_config_up_to_path
|
||||
|
||||
The ``FluffConfig`` object
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. autoclass:: sqlfluff.core.config.fluffconfig.FluffConfig
|
||||
:members:
|
||||
33
docs/source/reference/internals/functional.rst
Normal file
33
docs/source/reference/internals/functional.rst
Normal file
@@ -0,0 +1,33 @@
|
||||
:code:`sqlfluff.utils.functional`: Functional Traversal API
|
||||
-----------------------------------------------------------
|
||||
|
||||
These newer modules provide a higher-level API for rules working with segments
|
||||
and slices. Rules that need to navigate or search the parse tree may benefit
|
||||
from using these. Eventually, the plan is for **all** rules to use these
|
||||
modules. As of December 30, 2021, 17+ rules use these modules.
|
||||
|
||||
The modules listed below are submodules of `sqlfluff.utils.functional`.
|
||||
|
||||
:code:`segments` Module
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.segments
|
||||
:members:
|
||||
|
||||
:code:`segment_predicates` Module
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.segment_predicates
|
||||
:members:
|
||||
|
||||
:code:`raw_file_slices` Module
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.raw_file_slices
|
||||
:members:
|
||||
|
||||
:code:`raw_file_slice_predicates` Module
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. automodule:: sqlfluff.utils.functional.raw_file_slice_predicates
|
||||
:members:
|
||||
24
docs/source/reference/internals/index.rst
Normal file
24
docs/source/reference/internals/index.rst
Normal file
@@ -0,0 +1,24 @@
|
||||
.. _internal_api_docs:
|
||||
|
||||
Internal API
|
||||
============
|
||||
|
||||
Anything within this section should only be necessary for people who are
|
||||
developing plugins or rules to interact with SQLFluff on a deeper level or
|
||||
people who've decided to help the project by
|
||||
:ref:`contributing to SQLFluff <development>`.
|
||||
|
||||
As these docs are some of the least commonly used, you may find that not
|
||||
all modules are documented directly here, and so you may find it instructive
|
||||
to read this together with docstrings and comments directly within the
|
||||
SQLFluff codebase on `GitHub`_.
|
||||
|
||||
.. _`GitHub`: https://github.com/sqlfluff/sqlfluff
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
config
|
||||
rules
|
||||
functional
|
||||
reflow
|
||||
41
docs/source/reference/internals/reflow.rst
Normal file
41
docs/source/reference/internals/reflow.rst
Normal file
@@ -0,0 +1,41 @@
|
||||
.. _reflowinternals:
|
||||
|
||||
:code:`sqlfluff.utils.reflow`: Whitespace Reflow API
|
||||
----------------------------------------------------
|
||||
|
||||
Many rules supported by SQLFluff involve the spacing and layout of different
|
||||
elements, either to enforce a particular layout or just to add or remove
|
||||
code elements in a way sensitive to the existing layout configuration. The
|
||||
way this is achieved is through some centralised utilities in the
|
||||
`sqlfluff.utils.reflow` module.
|
||||
|
||||
This module aims to achieve several things:
|
||||
|
||||
* Less code duplication by implementing reflow logic in only one place.
|
||||
|
||||
* Provide a streamlined interface for rules to easily utilise reflow logic.
|
||||
|
||||
* Given this requirement, it's important that reflow utilities work
|
||||
within the existing framework for applying fixes to potentially
|
||||
templated code. We achieve this by returning `LintFix` objects which
|
||||
can then be returned by each rule wanting to use this logic.
|
||||
|
||||
* Provide a consistent way of *configuring* layout requirements. For more
|
||||
details on configuration see :ref:`layoutconfig`.
|
||||
|
||||
To support this, the module provides a :code:`ReflowSequence` class which
|
||||
allows access to all of the relevant operations which can be used to
|
||||
reformat sections of code, or even a whole file. Unless there is a very
|
||||
good reason, all rules should use this same approach to ensure consistent
|
||||
treatment of layout.
|
||||
|
||||
.. autoclass:: sqlfluff.utils.reflow.ReflowSequence
|
||||
:members:
|
||||
|
||||
.. autoclass:: sqlfluff.utils.reflow.elements.ReflowPoint
|
||||
:members:
|
||||
:inherited-members:
|
||||
|
||||
.. autoclass:: sqlfluff.utils.reflow.elements.ReflowBlock
|
||||
:members:
|
||||
:inherited-members:
|
||||
5
docs/source/reference/internals/rules.rst
Normal file
5
docs/source/reference/internals/rules.rst
Normal file
@@ -0,0 +1,5 @@
|
||||
:code:`sqlfluff.core.rules.base`: Base Rule Classes
|
||||
---------------------------------------------------
|
||||
|
||||
.. automodule:: sqlfluff.core.rules.base
|
||||
:members:
|
||||
@@ -10,8 +10,136 @@ of each individual release, see the detailed changelog_.
|
||||
|
||||
.. _changelog: https://github.com/sqlfluff/sqlfluff/blob/main/CHANGELOG.md
|
||||
|
||||
.. _upgrading_2_0:
|
||||
.. _upgrading_3_0:
|
||||
|
||||
Upgrading to 3.x
|
||||
----------------
|
||||
|
||||
This release makes a couple of potentially breaking changes:
|
||||
|
||||
* It drops support for python 3.7, which reached end of life in June 2023.
|
||||
|
||||
* It migrates to :code:`pyproject.toml` rather than :code:`setup.cfg` as
|
||||
the python packaging configuration file (although keeping :code:`setuptools`
|
||||
as the default backend).
|
||||
|
||||
* The serialised output for :code:`sqlfluff lint` now contains more information
|
||||
about the span of linting issues and initial proposed fixes. Besides the *new*
|
||||
fields, the original fields of :code:`line_pos` and :code:`line_no` have been
|
||||
renamed to :code:`start_line_pos` and :code:`start_line_no`, to distinguish
|
||||
them from the new fields starting :code:`end_*`.
|
||||
|
||||
* When linting from stdin, if there are no violations found - before this version,
|
||||
the serialised response would be simply an empty list (:code:`[]`). From 3.0
|
||||
onwards, there will now be a record for the *file* with some statistics,
|
||||
but the *violations* section of the response for that file will still be an
|
||||
empty list.
|
||||
|
||||
* The default :code:`annotation_level` set by the :code:`--annotation-level`
|
||||
option on the :code:`sqlfluff lint` command has been changed from :code:`notice`
|
||||
to :code:`warning`, to better distinguish linting errors from warnings, which
|
||||
always now have the level of :code:`notice`. This is only relevant when using
|
||||
the :code:`github-annotation` or :code:`github-annotation-native` formats.
|
||||
|
||||
* The previously deprecated :code:`--disable_progress_bar` on :code:`lint`,
|
||||
:code:`fix` and :code:`format` has now been removed entirely. Please migrate
|
||||
to :code:`--disable-progress-bar` to continue using this option.
|
||||
|
||||
* The :code:`--force` option on :code:`sqlfluff fix` is now the default behaviour
|
||||
and so the option has been deprecated. A new :code:`--check` option has been
|
||||
introduced which mimics the old default behaviour. This has been changed as it
|
||||
enables significantly lower memory overheads when linting and fixing large
|
||||
projects.
|
||||
|
||||
Upgrading to 2.3
|
||||
----------------
|
||||
|
||||
This release includes two minor breaking changes which will only affect
|
||||
users engaged in performance optimisation of SQLFluff itself.
|
||||
|
||||
* The :code:`--profiler` option on :code:`sqlfluff parse` has been removed.
|
||||
It was only present on the `parse` command and not `lint` or `fix`, and
|
||||
it is just as simple to invoke the python `cProfiler` directly.
|
||||
|
||||
* The :code:`--recurse` cli option and :code:`sqlfluff.recurse` configuration
|
||||
option have both been removed. They both existed purely for debugging the
|
||||
parser, and were never used in a production setting. The improvement in
|
||||
other debugging messages when unparsable sections are found means that
|
||||
this option is no longer necessary.
|
||||
|
||||
Upgrading to 2.2
|
||||
----------------
|
||||
|
||||
This release changes some of the interfaces between SQLFluff core and
|
||||
our plugin ecosystem. The only *breaking* change is in the interface
|
||||
between SQLFluff and *templater* plugins (which are not common in the
|
||||
ecosystem, hence why this is only a minor and not a major release).
|
||||
|
||||
For all plugins, we also recommend a different structure for their
|
||||
imports (especially for rule plugins which are more common in the
|
||||
ecosystem) - for performance and stability reasons. Some users had
|
||||
been experiencing very long import times with previous releases as
|
||||
a result of the layout of plugin imports. Users with affected plugins
|
||||
will begin to see a warning from this release onward, which can be
|
||||
resolved for their plugin by updating to a new version of that plugin
|
||||
which follows the guidelines.
|
||||
|
||||
Templater plugins
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Templaters before this version would pass a :code:`make_template()`
|
||||
callable to the slicing methods as part of being able to map the source
|
||||
file. This method would accept a :code:`str` and return a
|
||||
:code:`jinja2.environment.Template` object to allow the templater to
|
||||
render multiple variants of the template to do the slicing operation
|
||||
(which allows linting issues found in templated files to be mapped
|
||||
accurately back to their position in the unrendered source file).
|
||||
This approach is not very generalisable, and did not support templating
|
||||
operations with libraries other than :code:`jinja2`.
|
||||
|
||||
As a result, we have amended the interface to instead pass a
|
||||
:code:`render_func()` callable, which accepts a :code:`str` and returns
|
||||
a :code:`str`. This works fine for the :code:`jinja` templater (and
|
||||
by extension the :code:`dbt` templater) as they can simply wrap the
|
||||
original callable with a method that calls :code:`render()` on the
|
||||
original :code:`Template` object. It also however opens up the door
|
||||
to other templating engines, and in particular to *remote* templaters
|
||||
which might pass unrendered code over a HTTP connection for rendering.
|
||||
|
||||
Specifically:
|
||||
|
||||
* The :code:`slice_file()` method of the base templater classes no longer
|
||||
accepts an optional :code:`make_template` argument or a
|
||||
:code:`templated_str` argument.
|
||||
|
||||
* Instead a :code:`render_func` callable should be passed which can be
|
||||
called to generate the :code:`templated_str` on demand.
|
||||
|
||||
* Unlike the optional :code:`make_template` - :code:`render_func` is **not**
|
||||
optional and should always be present.
|
||||
|
||||
Rule plugins
|
||||
^^^^^^^^^^^^
|
||||
|
||||
We recommend that the module in a plugin which defines all
|
||||
of the hook implementations (anything using the :code:`@hookimpl` decorator)
|
||||
must be able to fully import before any rule implementations are imported.
|
||||
More specifically, SQLFluff must be able to both *import* **and**
|
||||
*run* any implementations of :code:`get_configs_info()` before any plugin
|
||||
rules (i.e. any derivatives of
|
||||
:py:class:`BaseRule <sqlfluff.core.rules.base.BaseRule>`) are *imported*.
|
||||
Because of this, we recommend that rules are defined in a
|
||||
separate module to the root of the plugin and then only imported *within*
|
||||
the :code:`get_rules()` method.
|
||||
|
||||
Importing in the main body of the module was previously our recommendation
|
||||
and so may be the case for versions of some plugins. If one of your plugins
|
||||
does use imports in this way, a warning will be presented from this version
|
||||
onward, recommending that you update your plugin.
|
||||
|
||||
See the :ref:`developingpluginsref` section of the docs for an example.
|
||||
|
||||
.. _upgrading_2_0:
|
||||
|
||||
Upgrading from 1.x to 2.0
|
||||
-------------------------
|
||||
@@ -126,7 +254,7 @@ sets only the values which differ from the default config.
|
||||
|
||||
[sqlfluff:indentation]
|
||||
# Enabling implicit indents for this project.
|
||||
# See https://docs.sqlfluff.com/en/stable/layout.html#configuring-indent-locations
|
||||
# See https://docs.sqlfluff.com/en/stable/perma/indent_locations.html
|
||||
allow_implicit_indents = True
|
||||
|
||||
# Add a few specific rule configurations, referenced by the rule names
|
||||
46
docs/source/reference/rules.rst
Normal file
46
docs/source/reference/rules.rst
Normal file
@@ -0,0 +1,46 @@
|
||||
.. _ruleref:
|
||||
|
||||
Rules Reference
|
||||
===============
|
||||
|
||||
This page is an index of available rules which are bundled with SQLFluff.
|
||||
|
||||
* For information on how to configure which rules are enabled for your
|
||||
project see :ref:`ruleselection`.
|
||||
|
||||
* If you just want to turn rules on or off for specific files, or specific
|
||||
sections of files, see :ref:`ignoreconfig`.
|
||||
|
||||
* For more information on how to configure the rules which you do enable
|
||||
see :ref:`ruleconfig`.
|
||||
|
||||
Core Rules
|
||||
----------
|
||||
|
||||
Certain rules belong to the :code:`core` rule group. In order for
|
||||
a rule to be designated as :code:`core`, it must meet the following
|
||||
criteria:
|
||||
|
||||
* Stable
|
||||
* Applies to most dialects
|
||||
* Could detect a syntax issue
|
||||
* Isn’t too opinionated toward one style (e.g. the :code:`dbt` style guide)
|
||||
|
||||
Core rules can also make it easier to roll out SQLFluff to a team by
|
||||
only needing to follow a 'common sense' subset of rules initially,
|
||||
rather than spending time understanding and configuring all the
|
||||
rules, some of which your team may not necessarily agree with.
|
||||
|
||||
We believe teams will eventually want to enforce more than just
|
||||
the core rules, and we encourage everyone to explore all the rules
|
||||
and customize a rule set that best suits their organization.
|
||||
|
||||
See the :ref:`config` section for more information on how to enable
|
||||
only :code:`core` rules by default.
|
||||
|
||||
Rule Index
|
||||
----------
|
||||
|
||||
.. include:: ../_partials/rule_table.rst
|
||||
|
||||
.. include:: ../_partials/rule_summaries.rst
|
||||
@@ -1,103 +0,0 @@
|
||||
.. _ruleref:
|
||||
|
||||
Rules Reference
|
||||
===============
|
||||
|
||||
`Rules` in `SQLFluff` are implemented as `crawlers`. These are entities
|
||||
which work their way through the parsed structure of a query to evaluate
|
||||
a particular rule or set of rules. The intent is that the definition of
|
||||
each specific rule should be really streamlined and only contain the logic
|
||||
for the rule itself, with all the other mechanics abstracted away. To
|
||||
understand how rules are enabled and disabled see :ref:`ruleselection`.
|
||||
|
||||
Core Rules
|
||||
----------
|
||||
|
||||
Certain rules belong to the :code:`core` rule group. In order for
|
||||
a rule to be designated as :code:`core`, it must meet the following
|
||||
criteria:
|
||||
|
||||
* Stable
|
||||
* Applies to most dialects
|
||||
* Could detect a syntax issue
|
||||
* Isn’t too opinionated toward one style (e.g. the :code:`dbt` style guide)
|
||||
|
||||
Core rules can also make it easier to roll out SQLFluff to a team by
|
||||
only needing to follow a 'common sense' subset of rules initially,
|
||||
rather than spending time understanding and configuring all the
|
||||
rules, some of which your team may not necessarily agree with.
|
||||
|
||||
We believe teams will eventually want to enforce more than just
|
||||
the core rules, and we encourage everyone to explore all the rules
|
||||
and customize a rule set that best suits their organization.
|
||||
|
||||
See the :ref:`config` section for more information on how to enable
|
||||
only :code:`core` rules by default.
|
||||
|
||||
Inline Ignoring Errors
|
||||
-----------------------
|
||||
|
||||
`SQLFluff` features inline error ignoring. For example, the following will
|
||||
ignore the lack of whitespace surrounding the ``*`` operator.
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
a.a*a.b AS bad_1 -- noqa: LT01
|
||||
|
||||
Multiple rules can be ignored by placing them in a comma-delimited list.
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
a.a * a.b AS bad_2, -- noqa: LT01, LT03
|
||||
|
||||
It is also possible to ignore non-rule based errors, and instead opt to
|
||||
ignore templating (``TMP``) & parsing (``PRS``) errors.
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
WHERE
|
||||
col1 = 2 AND
|
||||
dt >= DATE_ADD(CURRENT_DATE(), INTERVAL -2 DAY) -- noqa: PRS
|
||||
|
||||
.. note::
|
||||
It should be noted that ignoring ``TMP`` and ``PRS`` errors can lead to
|
||||
incorrect ``sqlfluff lint`` and ``sqlfluff fix`` results as `SQLFluff` can
|
||||
misinterpret the SQL being analysed.
|
||||
|
||||
Should the need arise, not specifying specific rules to ignore will ignore
|
||||
all rules on the given line.
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
a.a*a.b AS bad_3 -- noqa
|
||||
|
||||
.. _inline_ignoring_errors:
|
||||
|
||||
Ignoring line ranges
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Similar to `pylint's "pylint" directive"`_, ranges of lines can be ignored by
|
||||
adding :code:`-- noqa:disable=<rule>[,...] | all` to the line. Following this
|
||||
directive, specified rules (or all rules, if "all" was specified) will be
|
||||
ignored until a corresponding `-- noqa:enable=<rule>[,...] | all` directive.
|
||||
|
||||
.. code-block:: sql
|
||||
|
||||
-- Ignore rule AL02 from this line forward
|
||||
SELECT col_a a FROM foo -- noqa: disable=AL02
|
||||
|
||||
-- Ignore all rules from this line forward
|
||||
SELECT col_a a FROM foo -- noqa: disable=all
|
||||
|
||||
-- Enforce all rules from this line forward
|
||||
SELECT col_a a FROM foo -- noqa: enable=all
|
||||
|
||||
|
||||
.. _`pylint's "pylint" directive"`: http://pylint.pycqa.org/en/latest/user_guide/message-control.html
|
||||
|
||||
Rule Index
|
||||
----------
|
||||
|
||||
.. include:: partials/rule_table.rst
|
||||
|
||||
.. include:: partials/rule_summaries.rst
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user