forked from github/plane
Compare commits
1144 Commits
chore/work
...
preview
Author | SHA1 | Date | |
---|---|---|---|
dbc8cd182a | |||
55457758ec | |||
534d4e4dc6 | |||
e0a20dbb0a | |||
|
8b6035d315 | ||
|
b90ca97461 | ||
|
b0ab17021f | ||
|
cd395fa3d2 | ||
|
9e3fedd0df | ||
|
5250c64be5 | ||
|
5c6170507e | ||
|
4f15c03f91 | ||
|
43534dcaed | ||
|
cfbc1a91af | ||
|
7aa1d750ea | ||
|
91ed27837e | ||
|
f45c2d12fd | ||
|
d0cb00f28a | ||
|
c80638090f | ||
|
549790ee8a | ||
|
1dac70ecbe | ||
|
3c2b2e3ed6 | ||
|
f805acbcef | ||
|
b349de556a | ||
|
699017014e | ||
|
2d4547601d | ||
|
9b918b727a | ||
|
74a88fc028 | ||
|
e86397b649 | ||
|
7e0520d1cf | ||
|
95580d0c62 | ||
|
03df410b52 | ||
|
c843a1757f | ||
|
2ea6d70fac | ||
|
7bec244a67 | ||
|
ddb07dbe5f | ||
|
9b0949148f | ||
|
8d009187ab | ||
|
39b5a58ce8 | ||
|
986f81e3ae | ||
|
fd2cacb0cd | ||
|
4f138ac3f9 | ||
|
2c8c139c62 | ||
|
57f2445bb8 | ||
|
90609b306f | ||
|
62dac421dc | ||
|
3742ea91bf | ||
|
b4cc58d5dd | ||
|
71b73000d2 | ||
|
4cba7ff2f5 | ||
|
840bc51537 | ||
|
0ab03c963c | ||
|
1fb8791941 | ||
|
e9518ced89 | ||
|
bc0752f7e8 | ||
|
4c97098218 | ||
|
91d85ffed0 | ||
|
d485446ee2 | ||
|
92fd7b6977 | ||
|
e7fc942514 | ||
|
68ebcfd04e | ||
|
fed5916907 | ||
|
204e4a8c6d | ||
|
bd470f7a90 | ||
|
84e73291cc | ||
|
1423955a34 | ||
|
a771bab64c | ||
|
fafdac68f0 | ||
|
3a466cfe40 | ||
|
86715f5b20 | ||
|
97fb08928f | ||
|
8bdf8fa24f | ||
|
f1530688f8 | ||
|
265ce7778a | ||
|
79d227b906 | ||
|
5d4b3cea4c | ||
|
2c510a7cb9 | ||
|
478535422b | ||
|
35ca495f40 | ||
|
68fa7a485b | ||
|
ecfd3b68e3 | ||
|
9249e6d5b9 | ||
|
30cee78170 | ||
|
4c46b075b4 | ||
|
f3d9053d81 | ||
|
a136931168 | ||
|
5235b78cb8 | ||
|
1c3619a4d6 | ||
|
f3fd48dd43 | ||
|
c6f36a056b | ||
|
51683e6b2f | ||
|
fb189ca447 | ||
|
ea728a385f | ||
|
7452e40134 | ||
|
599809dd2e | ||
|
ad5559afe4 | ||
|
11c3d4fbd8 | ||
|
20795db9e8 | ||
|
7e9daf8a20 | ||
|
07106f9161 | ||
|
4b8a1353fc | ||
|
8205961d2a | ||
|
53c63c8a2b | ||
|
564d64a30e | ||
|
961fdc17d5 | ||
|
7e0037b06b | ||
|
9f9e508bb7 | ||
|
2429ac053d | ||
|
31b2fa2c14 | ||
|
ec837a42d5 | ||
|
b5ed602e05 | ||
|
9652d56e49 | ||
|
2f637fc9f1 | ||
|
95d2b6f1c1 | ||
|
5c7886d7f3 | ||
|
e4211e5817 | ||
|
1caceca1e7 | ||
|
53f3357149 | ||
|
cdf86391f0 | ||
|
72392d5731 | ||
|
db8cf1fb24 | ||
|
d262eb4ffb | ||
|
60aea62739 | ||
|
baab6ce99f | ||
|
5aed04eb41 | ||
|
165bec9aa4 | ||
|
231fd52992 | ||
|
9642b761b7 | ||
|
86ae79f851 | ||
|
991b8e7703 | ||
|
56d4e25431 | ||
|
23f77328d4 | ||
|
58a45c96c0 | ||
|
15e04e55bb | ||
|
8911eaf676 | ||
|
0d2a8cca50 | ||
|
061be85a5d | ||
|
4d1b5adfc4 | ||
|
3ff0f6187a | ||
|
37c5ce54d5 | ||
|
054dd2bb7d | ||
|
180f1d74e8 | ||
|
6ab8588afb | ||
|
e1f04356fc | ||
|
4eefc7f692 | ||
|
7142889c23 | ||
|
bca3e13242 | ||
|
e02fa4d9e7 | ||
|
621624e29f | ||
|
4ea616f1cd | ||
|
901a7703ff | ||
|
2f883e4939 | ||
|
cb4cfa1dd5 | ||
|
7d3a96b3d0 | ||
|
0f79c6d7d8 | ||
|
6a245e121a | ||
|
293854fb3a | ||
|
53ddef1cd5 | ||
|
473dfc7a5b | ||
|
1a462711e1 | ||
|
3139f8e109 | ||
|
aa3702cd46 | ||
|
aed87ef472 | ||
|
1b742f66c6 | ||
|
568701881e | ||
|
2dbea54d2a | ||
|
82ba9833f2 | ||
|
0759666b75 | ||
|
e9774e1af3 | ||
|
4a93fdbdb4 | ||
|
861a1c4132 | ||
|
5244ba72c9 | ||
|
36bc7bf996 | ||
|
0789238282 | ||
|
c17748eec2 | ||
|
94f1e6d957 | ||
|
13bbb9cde4 | ||
|
92a077dce1 | ||
|
ed2e4ad6f7 | ||
|
cbe5d9a047 | ||
|
d25fcfdd88 | ||
|
aaad37575b | ||
|
6bc133e3b1 | ||
|
42b524b16a | ||
|
0bc4b6cece | ||
|
43c75f4457 | ||
|
80761d3507 | ||
|
9c13dbd957 | ||
|
8d9adf4d87 | ||
|
4ccb505f36 | ||
|
884856b021 | ||
|
552c66457a | ||
|
1363ef0b2d | ||
|
898cf98be3 | ||
|
cb632408f9 | ||
|
b930d98665 | ||
|
69e110f4a8 | ||
|
c3c6ef8830 | ||
|
8aca74c68d | ||
|
c97b994311 | ||
|
443b93f897 | ||
|
730e556bea | ||
|
f77f4b8221 | ||
|
5c4c3f5c04 | ||
|
73c91654eb | ||
|
578bd29f6f | ||
|
2e5e14556d | ||
|
ecac45e885 | ||
|
6ec9c64f7c | ||
|
f493a03f56 | ||
|
48c9b78397 | ||
|
9c29ad1a28 | ||
|
e3ac075ee2 | ||
|
8c9d328c24 | ||
|
b2146098e2 | ||
|
4a06572f73 | ||
|
d34df5c805 | ||
|
01702e9f66 | ||
|
b57c389c75 | ||
|
27acd1bec1 | ||
|
539afb0792 | ||
|
6c7ba3bc79 | ||
|
c8ab650008 | ||
|
89d019df57 | ||
|
535731141f | ||
|
4b30339a59 | ||
|
899771a678 | ||
|
8997ee2e3e | ||
|
e05bc3965c | ||
|
b07fec533c | ||
|
cc069b61aa | ||
|
2074bb97db | ||
|
f5151ae717 | ||
|
deb5ac0578 | ||
|
cead56cc52 | ||
|
cb78ccad1f | ||
|
6c6b7156bb | ||
|
8cc372679c | ||
|
94327b8311 | ||
|
7b88a2a88c | ||
|
47a7f60611 | ||
|
7a8aef4599 | ||
|
bc02e56e3c | ||
|
b03f6a81e2 | ||
|
b535d8a23c | ||
|
bce69bcbe1 | ||
|
1fa47a6c04 | ||
|
c16a5b9b71 | ||
|
a852e3cc52 | ||
|
ed8782757d | ||
|
549f6d0943 | ||
|
cb5198c883 | ||
|
b4adb82d40 | ||
|
3f1ce9907d | ||
|
da735f318a | ||
|
a08f401452 | ||
|
4861be2773 | ||
|
6a6ab5544a | ||
|
466f69a0b9 | ||
|
f188c9fdc5 | ||
|
dd579f83ee | ||
|
66f2492e60 | ||
|
c08d6987d0 | ||
|
e4f48d6878 | ||
|
b3d3c0fb06 | ||
|
cace132a2a | ||
|
3d09a69d58 | ||
|
921b9078f1 | ||
|
e1db39ffc8 | ||
|
2b05d23470 | ||
|
69fa1708cc | ||
|
666b7ea577 | ||
|
7b76df6868 | ||
|
dbdd14493b | ||
|
4572b7378d | ||
|
5a32d10f96 | ||
|
126d01bdc5 | ||
|
87eadc3c5d | ||
|
53367a6bc4 | ||
|
c06ef4d1d7 | ||
|
73334130be | ||
|
4d0f641ee0 | ||
|
50318190f5 | ||
|
d07dd65022 | ||
|
f8f9dd3331 | ||
|
af70722e34 | ||
|
d99529b109 | ||
|
6eb7014ea4 | ||
|
59c9b3bdce | ||
|
894de80f41 | ||
|
4b706437d7 | ||
|
e4bccea824 | ||
|
d39f2526a2 | ||
|
bc6e48fcd6 | ||
|
e6f33eb262 | ||
|
5cfebb8dae | ||
|
e4988ee940 | ||
|
850bf01d65 | ||
|
f183e389ea | ||
|
5d7c0a2a64 | ||
|
92e994990c | ||
|
d1087820f6 | ||
|
65024fe5ec | ||
|
62693abb09 | ||
|
9326fb0762 | ||
|
56805203f1 | ||
|
39136d3a9f | ||
|
34301e4399 | ||
|
51f795fbd7 | ||
|
7abfbac479 | ||
|
c4028efd71 | ||
|
0215b697a5 | ||
|
37fb3cd4e2 | ||
|
30cc923fdb | ||
|
b1520783cf | ||
|
0f56945321 | ||
|
16d7b3c7d1 | ||
|
5033b1ba7e | ||
|
6dd785b5dd | ||
|
ef467f7f6b | ||
|
fe64208e84 | ||
|
ec43c8e634 | ||
|
fece947eb5 | ||
|
1f5d54260a | ||
|
895ff03cf2 | ||
|
7e46cbcb52 | ||
|
6c70d3854a | ||
|
bd142989b4 | ||
|
002b2505f3 | ||
|
34d6b135f2 | ||
|
c858b76054 | ||
|
aba170dbde | ||
|
58e0170cac | ||
|
e20681bb6c | ||
|
c950e3d3f7 | ||
|
ee57f815fd | ||
|
67fdafb720 | ||
|
888268ac11 | ||
|
87888a55ce | ||
|
1866474569 | ||
|
e1d73057ae | ||
|
01e09873e6 | ||
|
dad682a7c3 | ||
|
5c089f0223 | ||
|
e1f13af084 | ||
|
8b6206f28b | ||
|
e1ef830f39 | ||
|
a8c5b558b1 | ||
|
510eeb7a8f | ||
|
b4fb9f1aa2 | ||
|
31ebecba52 | ||
|
395098417c | ||
|
812df59d1d | ||
|
7c85ee7e55 | ||
|
9c1d496165 | ||
|
d6a32ef75d | ||
|
6240b17063 | ||
|
33c99ded77 | ||
|
ba6479674c | ||
|
46e1d46005 | ||
|
8c1f169f61 | ||
|
0aaca709da | ||
|
5f6c9a4166 | ||
|
9f055840ef | ||
|
50cbb2f002 | ||
|
849d3a66c1 | ||
|
1f7565ce52 | ||
|
34f89ba45b | ||
|
27fcfcf620 | ||
|
5571d42e10 | ||
|
e0a4d7a12a | ||
|
5c64933927 | ||
|
9c50ee39c3 | ||
|
18b5115546 | ||
|
3372e21759 | ||
|
03f8bfae10 | ||
|
03e5f4a5bd | ||
|
62607ade6f | ||
|
7927b7678d | ||
|
ebad7f0cdf | ||
|
b1bf125916 | ||
|
9b54fe80a8 | ||
|
02182e05c4 | ||
|
e92417037c | ||
|
d73cd2ec9d | ||
|
acf81f30f5 | ||
|
1cc2a4e679 | ||
|
c61e8fdb24 | ||
|
b1a5e4872b | ||
|
b1592adc66 | ||
|
e86d2ba743 | ||
|
b10e89fdd7 | ||
|
efadc728af | ||
|
370decc8aa | ||
|
ac6e710623 | ||
|
56f4df4cb5 | ||
|
fcbe690665 | ||
|
487e961df1 | ||
|
71901efcdf | ||
|
022a286eba | ||
|
c851ec7034 | ||
|
fb442d6dfe | ||
|
e9fdb0ff5d | ||
|
614096fd2f | ||
|
133c9b3ddb | ||
|
48b55ef261 | ||
|
7464c1090a | ||
|
ab3c3a6cf9 | ||
|
92becbc617 | ||
|
ee318ce91b | ||
|
fb4f4260fa | ||
|
3efb7fc070 | ||
|
95871b0049 | ||
|
952eb871df | ||
|
6bf9d84bea | ||
|
a6a28d46c7 | ||
|
4a44bb2a6c | ||
|
d16a0b61d0 | ||
|
c7db290718 | ||
|
e433a835fd | ||
|
cf3b888465 | ||
|
a64e1e04db | ||
|
07a4cb1f7d | ||
|
e1bf318317 | ||
|
bbbd7047d3 | ||
|
17e5663e81 | ||
|
170f30c7dd | ||
|
7381a818a9 | ||
|
261013b794 | ||
|
ce9ed6b25e | ||
|
10057377dc | ||
|
eba5ed24ad | ||
|
41e812a811 | ||
|
a94c607031 | ||
|
665a07f15a | ||
|
85a8af5125 | ||
|
7628419a26 | ||
|
2cd16c61ba | ||
|
8f7b05b73f | ||
|
98d545cfb9 | ||
|
2548a9f062 | ||
|
d901277a20 | ||
|
489555f788 | ||
|
b827a1af27 | ||
|
1d2e331cec | ||
|
e51e4761b9 | ||
|
9299478539 | ||
|
fb4cffdd1c | ||
|
83bf28bb83 | ||
|
25a2816a76 | ||
|
571b89632c | ||
|
5b5698ad97 | ||
|
b1989bae1b | ||
|
83139989c2 | ||
|
1bf06821bb | ||
|
eea3b4fa54 | ||
|
f64284f6a0 | ||
|
06496ff0f0 | ||
|
247937d93a | ||
|
e0a18578f5 | ||
|
e93bbec4cd | ||
|
d90aaba842 | ||
|
a303e52039 | ||
|
73d12cf1bb | ||
|
7651640e29 | ||
|
41a9dc3603 | ||
|
afda3ee6ca | ||
|
fc8edab59b | ||
|
9cf5348019 | ||
|
042ed04a03 | ||
|
e29edfc02b | ||
|
eb0af8de59 | ||
|
0fb43c6fc5 | ||
|
963d26ccda | ||
|
3eb819c4ae | ||
|
4b67a41b41 | ||
|
0a36850d03 | ||
|
5bce2014c5 | ||
|
3f7f91e120 | ||
|
1927fdd437 | ||
|
b86c6c906a | ||
|
99975d0ba0 | ||
|
4f72ebded9 | ||
|
be5d1eb9f9 | ||
|
8d730e6680 | ||
|
41a3cb708c | ||
|
27037a2177 | ||
|
e2affc3fa6 | ||
|
3a14f19c99 | ||
|
eb4c3a4db5 | ||
|
2e129682b7 | ||
|
4a145f7a06 | ||
|
e69fcd410c | ||
|
55afef204d | ||
|
f9e187d8b9 | ||
|
9545dc77d6 | ||
|
1fc987c6c9 | ||
|
c1c0297b6d | ||
|
1a7b5d7222 | ||
|
fb3dd77b66 | ||
|
a43dfc097d | ||
|
346c6f5414 | ||
|
729b6ac79e | ||
|
0a35fcfbc0 | ||
|
75b4c6e7d6 | ||
|
a1d6c40627 | ||
|
4a2e648f6d | ||
|
76db394ab1 | ||
|
4563b50fad | ||
|
065226f8b2 | ||
|
0a99a1a091 | ||
|
4b2a9c8335 | ||
|
751b15a7a7 | ||
|
ac22769220 | ||
|
46ae0f98dc | ||
|
30aaec9097 | ||
|
4041c5bc5b | ||
|
403595a897 | ||
|
0ee93dfd8c | ||
|
ee0e3e2e25 | ||
|
0165abab3e | ||
|
7d07afd59c | ||
|
efaba43494 | ||
|
a8ec2b6914 | ||
|
39eb8c98d1 | ||
|
138d06868b | ||
|
2eab3b41a2 | ||
|
662b497082 | ||
|
67cf1785b8 | ||
|
7d08a57be6 | ||
|
b0ad48e35a | ||
|
d68669df51 | ||
|
4e600e4e9b | ||
|
4fc4da7982 | ||
|
6f210e1f4b | ||
|
f7803dab56 | ||
|
70172f8e3d | ||
|
21bc668a56 | ||
|
dc5a5f4a91 | ||
|
2c67aced15 | ||
|
3a4c893368 | ||
|
0d036e6bf5 | ||
|
3ef0570f6a | ||
|
c9d2ea36b8 | ||
|
f0836ceb10 | ||
|
888665783e | ||
|
817737b2c0 | ||
|
638c1e21c9 | ||
|
c67e097fc2 | ||
|
804dd8300d | ||
|
c6d6b9a0e9 | ||
|
9debd81a50 | ||
|
d53a086206 | ||
|
ef8472ce5e | ||
|
4aa34f3eda | ||
|
c7616fda11 | ||
|
483fc57601 | ||
|
09a1a55da8 | ||
|
61f92563a9 | ||
|
00e07443b0 | ||
|
c4efdcd704 | ||
|
3c9679dff9 | ||
|
b3393f5c48 | ||
|
f995736642 | ||
|
f7f1f2bea4 | ||
|
532da80375 | ||
|
212f2b54f8 | ||
|
9ecdcc6fde | ||
|
ddae745669 | ||
|
e78c1f2060 | ||
|
ebc891b985 | ||
|
60b5589c48 | ||
|
f8208b1b5e | ||
|
6c6b764421 | ||
|
5c912b8821 | ||
|
ff19980502 | ||
|
a1a24e4574 | ||
|
c1c2a6ddce | ||
|
ec3cad1f25 | ||
|
336c97d336 | ||
|
e4a3d0db5c | ||
|
7f2e99dd2d | ||
|
a104cc4814 | ||
|
03cbad5110 | ||
|
2956c43ed5 | ||
|
eae32593cb | ||
|
7fd625e0e3 | ||
|
f007dcff26 | ||
|
adf091fa07 | ||
|
b66f07845a | ||
|
911211cf3d | ||
|
53b41481a2 | ||
|
9d9d703c62 | ||
|
a2f34e9573 | ||
|
81f84f24f7 | ||
|
87f39d7372 | ||
|
1a1594e818 | ||
|
8d3ea5bb3e | ||
|
6a2be6afc4 | ||
|
338d58f79d | ||
|
e23e4bc392 | ||
|
c1598c3d38 | ||
|
4a436eeee2 | ||
|
47681fe9f8 | ||
|
f27efb80e1 | ||
|
c1e1b81b99 | ||
|
d9db765ae3 | ||
|
0531dc3308 | ||
|
e36b7a5ab9 | ||
|
c6b756d918 | ||
|
f3ae57bc85 | ||
|
2374161030 | ||
|
512ad83c08 | ||
|
801f75f406 | ||
|
f88109ef04 | ||
|
bb50df0dff | ||
|
2986769f28 | ||
|
fd5326dec6 | ||
|
49452a68ab | ||
|
b3ac9def8d | ||
|
be62662bb1 | ||
|
4eba5c115a | ||
|
24442ccc9c | ||
|
e32a50fba6 | ||
|
4baf2a2f09 | ||
|
577118ca02 | ||
|
4a26f11e23 | ||
|
9926e321f6 | ||
|
7f5028a4f6 | ||
|
06a7bdffd7 | ||
|
d3dedc8e51 | ||
|
d656f8e62a | ||
|
864519e770 | ||
|
034f0a06db | ||
|
7263cb072c | ||
|
ea3a0362b0 | ||
|
c9337d4a41 | ||
|
f347c1cd69 | ||
|
3f07c48b35 | ||
|
04b2214bf2 | ||
|
1adb38655a | ||
|
4ab64b6905 | ||
|
4b0d85591e | ||
|
8e2789af3e | ||
|
3592feb3d7 | ||
|
bc6a2542f5 | ||
|
5e2d93df52 | ||
|
cce349b805 | ||
|
fadda7cf04 | ||
|
c4093d29a7 | ||
|
8c89e9cc01 | ||
|
5625a3581a | ||
|
ee387c4222 | ||
|
6a16a98b03 | ||
|
11f84a986c | ||
|
4b0d48b290 | ||
|
9789068880 | ||
|
151c355177 | ||
|
38580c3940 | ||
|
7ff91fdb82 | ||
|
a679b42200 | ||
|
27762ea500 | ||
|
2cd5dbcd02 | ||
|
96868760a3 | ||
|
df97b35a99 | ||
|
4611ec0b83 | ||
|
e6b31e2550 | ||
|
59fb371e3d | ||
|
23e5306f6d | ||
|
0f99fb302b | ||
|
0e49d616b7 | ||
|
e72920d33e | ||
|
80dc38b649 | ||
|
43b503c756 | ||
|
68d370fd86 | ||
|
5e03f3dd82 | ||
|
1257a88089 | ||
|
12a3392722 | ||
|
b62a1b11b1 | ||
|
0a05aef046 | ||
|
efd3ebf067 | ||
|
266f14d550 | ||
|
9eb8f41008 | ||
|
b340232e76 | ||
|
64ca267db4 | ||
|
46e79dde27 | ||
|
7272c54439 | ||
|
543636eb40 | ||
|
6c2fecd322 | ||
|
af5057defa | ||
|
c5e7c2f6a8 | ||
|
eda1e46a2d | ||
|
ee78b4fe52 | ||
|
a19598fec1 | ||
|
afff9790d4 | ||
|
c0cd201b7c | ||
|
942785b7c0 | ||
|
a70f551d17 | ||
|
2580e66d4b | ||
|
d887b780ae | ||
|
4b0ccea146 | ||
|
02a776396b | ||
|
5cd93f5e59 | ||
|
69b1d0a929 | ||
|
b522de99ba | ||
|
b58d7a715a | ||
|
87cd44bcd2 | ||
|
804b7d8663 | ||
|
1539340113 | ||
|
d9ee692ce9 | ||
|
c3ba9f61d8 | ||
|
6e702d6cc7 | ||
|
447a8bc2f8 | ||
|
34d872beca | ||
|
4263e9b507 | ||
|
768b4abf22 | ||
|
6f0af4689b | ||
|
22bbdd5ab8 | ||
|
28a9c53202 | ||
|
33d88f56da | ||
|
23001d425f | ||
|
5de94c575a | ||
|
f200acc1e8 | ||
|
f132fe59ae | ||
|
e584259c04 | ||
|
a77ceb636f | ||
|
10ab081a0b | ||
|
1d5a3a02c1 | ||
|
62b9b259c0 | ||
|
91e84aede1 | ||
|
71bf049e89 | ||
|
685e62a72f | ||
|
d9bd43f43c | ||
|
10bdcc906c | ||
|
54964924f0 | ||
|
ff8008cbed | ||
|
78428fb564 | ||
|
b2824366a8 | ||
|
05eb728c40 | ||
|
ad2471c5a7 | ||
|
440cfc0f20 | ||
|
9a32d722dc | ||
|
37fe9a185c | ||
|
eabb31a764 | ||
|
1e27c7936d | ||
|
0a617eec26 | ||
|
ad20079e0c | ||
|
e14baf17a7 | ||
|
542b18a585 | ||
|
37d88cc05b | ||
|
b652d1a8f1 | ||
|
98b7a941f9 | ||
|
dc131ee05b | ||
|
0e3d15215d | ||
|
816b6abf3b | ||
|
6ecaa661a7 | ||
|
0b1efb173f | ||
|
aceee7d2e2 | ||
|
0a41eff435 | ||
|
5f681973a0 | ||
|
9c65657a66 | ||
|
6fab75f9ab | ||
|
111070bdb9 | ||
|
d76840ff6f | ||
|
d4b6f4faf1 | ||
|
fe9519314a | ||
|
e141091e99 | ||
|
40b8b0ac35 | ||
|
47d6b152a0 | ||
|
216a7c8fda | ||
|
e7468292c7 | ||
|
7bff8d2ec5 | ||
|
6f2cce081f | ||
|
a86dafc11c | ||
|
81256d6373 | ||
|
c9792da4a1 | ||
|
37df0bcdd8 | ||
|
18c86bd8cc | ||
|
0ee6c20272 | ||
|
184db0156c | ||
|
b7a0f3c693 | ||
|
e40f38e2e1 | ||
|
05e7afab8d | ||
|
969a51f425 | ||
|
849bc92aea | ||
|
a37dec45d9 | ||
|
e1793dda74 | ||
|
31fdaf2659 | ||
|
8a1a6c6f62 | ||
|
3d83101f69 | ||
|
d473ba9d0d | ||
|
e5902152ab | ||
|
f03a9a6de8 | ||
|
b7e2f1e57a | ||
|
885de6f679 | ||
|
ce9714ff12 | ||
|
74b141eea2 | ||
|
08425c9614 | ||
|
ecfcc03ef0 | ||
|
829c08f0ee | ||
|
2edd2d947e | ||
|
5b67f27345 | ||
|
7684a2c091 | ||
|
4e2bf24e8d | ||
|
aafac9ed1d | ||
|
3adf48e429 | ||
|
1c546e3cc5 | ||
|
5c7382d894 | ||
|
1f8ae3a5ad | ||
|
f1ed0c979c | ||
|
78b29eb81b | ||
|
8d3a0a2eec | ||
|
910d1a1de3 | ||
|
6004f29bbc | ||
|
9d0056cfee | ||
|
4bb99d5fbf | ||
|
6c61fbd102 | ||
|
2605b938f0 | ||
|
b4f51cb5af | ||
|
fe80ca3e1c | ||
|
b78e83d81b | ||
|
ee68c3ae86 | ||
|
239f68e260 | ||
|
f949d57fa0 | ||
|
dadd2cf39b | ||
|
13d7832d35 | ||
|
0f892d4670 | ||
|
e8945f244d | ||
|
a0588be405 | ||
|
29a0ba4ddc | ||
|
644073f063 | ||
|
63a15f2bf9 | ||
|
936452758a | ||
|
4fdac437e9 | ||
|
48fe6f9b9a | ||
|
05d675c138 | ||
|
09b4f6dedd | ||
|
449ac06fd7 | ||
|
c7d50bb7ce | ||
|
90de11c08d | ||
|
24c02495aa | ||
|
1795916042 | ||
|
43cbe44a35 | ||
|
361ee16567 | ||
|
de24b02a0a | ||
|
1d3745157d | ||
|
07c15fcc1f | ||
|
c33cfeb227 | ||
|
f119d702c7 | ||
|
0f752f93b6 | ||
|
ae2e1a4b64 | ||
|
472a5d8047 | ||
|
3b12332704 | ||
|
c7cad452ab | ||
|
82c0ee00a3 | ||
|
8041b23a63 | ||
|
f38278f465 | ||
|
73b58e91ee | ||
|
b515c0ffa6 | ||
|
b629263bc2 | ||
|
1bf064df15 | ||
|
9918d5242c | ||
|
dd87bd0ee2 | ||
|
26d37fbd38 | ||
|
bf2c6e36ef | ||
|
62e66acc37 | ||
|
0f28008fa5 | ||
|
5b0066140f | ||
|
e5ae139178 | ||
|
02e2c6f848 | ||
|
8d15b9e7de | ||
|
5eba682128 | ||
|
457ed9bfe6 | ||
|
842048b2f2 | ||
|
d16a6402cd | ||
|
09253e3f55 | ||
|
9538d9ee9b | ||
|
b487e2defe | ||
|
fffcb2d947 | ||
|
01874cb2db | ||
|
b82f04ea04 | ||
|
29e8e6c997 | ||
|
c68ade11ec | ||
|
cb2577d259 | ||
|
5ac2c17e53 | ||
|
f198d14cc7 | ||
|
3400e31d0a | ||
|
6c8c61c53b | ||
|
dd9056c165 | ||
|
89d4851ff5 | ||
|
49a4c466b7 | ||
|
aebf8daf08 | ||
|
d4a4046cd3 | ||
|
39d1916ab2 | ||
|
d0d106901e | ||
|
9147b58b99 | ||
|
4ff3a34a65 | ||
|
74ca187659 | ||
|
2a5ff3397f | ||
|
557fb2306b | ||
|
8409a84004 | ||
|
539c7a3455 | ||
|
c455f03ced | ||
|
68dcfcd451 | ||
|
151ec259d8 | ||
|
e755ce3272 | ||
|
9d16b39c15 | ||
|
9c5bf47ace | ||
|
b5ac2f8078 | ||
|
95c7403efc | ||
|
92cb1834a5 | ||
|
691666e5e2 | ||
|
e585255c4c | ||
|
55ce748aa1 | ||
|
c3f3578e8b | ||
|
c4602951c9 | ||
|
7a96e12523 | ||
|
4c53157b0e | ||
|
1f860312c6 | ||
|
13667d491b | ||
|
aba4592b73 | ||
|
c5cc706978 | ||
|
b44dd26347 | ||
|
b35874e294 | ||
|
37c03ff239 | ||
|
8486983aa9 | ||
|
24a28e44ff | ||
|
a56e7b17f1 | ||
|
97bc153ef9 | ||
|
be2cf2e842 | ||
|
f481957818 | ||
|
1bddaf75b2 | ||
|
91cb15c2e3 | ||
|
a035cee165 | ||
|
a4d7b2423e | ||
|
e6eef7eb0b | ||
|
dbc8150852 | ||
|
de9c1a60e0 | ||
|
1b51892489 | ||
|
d12bd9507e | ||
|
41f0d55dab | ||
|
eda0b32b97 | ||
|
882cf91a91 | ||
|
25b7e22b70 | ||
|
a36aa4d093 | ||
|
c346d82b0b | ||
|
054691d80e | ||
|
59a1b6ca77 | ||
|
979e6fe383 | ||
|
6392a24098 | ||
|
f1b748947a | ||
|
7bc05b0bdc | ||
|
d7457ed5f4 | ||
|
f969ed0662 | ||
|
1296b6af42 | ||
|
a515c59518 | ||
|
a276bd2301 | ||
|
83026e8b2f | ||
|
1ef0a86c9d | ||
|
f3f71f4f9e | ||
|
43cd0554fb | ||
|
ff03f8badb | ||
|
df647cc82a | ||
|
a96edca190 | ||
|
abd6e32fca | ||
|
46c7f98c9d | ||
|
c95a6522ab | ||
|
c598d458f6 | ||
|
344ae5d551 | ||
|
5ccc226498 | ||
|
fd5b7d20a8 | ||
|
ee30eb0590 | ||
|
804313413b | ||
|
b2a948dcae | ||
|
468e117492 | ||
|
90ca459b4a | ||
|
f7fa4d8b65 | ||
|
56fb9414f1 | ||
|
ffa74e21ac | ||
|
c2b90df498 | ||
|
a477161fca | ||
|
220389e74e | ||
|
b30e41f324 | ||
|
53950f0684 | ||
|
e060a4dbf0 | ||
|
926643e597 | ||
|
7f6d59559d | ||
|
d71ba47262 | ||
|
44bc199385 | ||
|
621cf7b83d | ||
|
c2c0dde824 | ||
|
011db50da6 | ||
|
e16e468b8f | ||
|
d2a3d00e82 | ||
|
72b592b9ec | ||
|
18587395c9 | ||
|
eb366887d7 | ||
|
03387848fe | ||
|
accdd02ce7 | ||
|
e01ca97fc9 | ||
|
0fcadca53a | ||
|
ad22ff222f | ||
|
c4fb543372 | ||
|
d84e043c93 | ||
|
10cde58363 | ||
|
041c3af35a | ||
|
3a1b722d31 | ||
|
2e3476ab3c | ||
|
27478ee4bd | ||
|
bdc85ae10d | ||
|
983b0debcd | ||
|
8062800bd9 | ||
|
21e32ce863 | ||
|
0887d35596 | ||
|
726f4668e0 | ||
|
6e940399cb | ||
|
f79bd9df60 | ||
|
70994d1da7 | ||
|
6d46771109 | ||
|
7ad0360920 | ||
|
7b5eea8722 | ||
|
a7e446f134 | ||
|
1c29f0b0a9 | ||
|
acc6b5ed5c | ||
|
20fe27e086 | ||
|
398f35d36d | ||
|
bf060cc8eb | ||
|
a9ea5b6d90 | ||
|
1c2761000a | ||
|
ecea744657 | ||
|
1bd38ad4c7 | ||
|
af3267ac5a | ||
|
192fe9b057 | ||
|
0669dab1c4 | ||
|
34e6ef0d8d | ||
|
c305cf2c72 | ||
|
267cf75004 | ||
|
86de38d3a0 | ||
|
910bd11e86 | ||
|
77af7311ba | ||
|
f56067f372 | ||
|
0530410201 | ||
|
1e104e85a4 | ||
|
dfffa63151 | ||
|
bf525aa2c4 | ||
|
fc523c6485 | ||
|
8c1f9e720a | ||
|
fa8ae6b8ce | ||
|
d6abb87a3a | ||
|
db75eced0a | ||
|
3c89ef8cc3 | ||
|
f9590929dc | ||
|
7169463ee7 | ||
|
152a920788 | ||
|
a1a9015df2 | ||
|
37559fd69f | ||
|
1559822a1b | ||
|
9215134e32 | ||
|
51cf93c9a8 | ||
|
3dd3499b3e | ||
|
20f3d7ce09 | ||
|
b106e15268 | ||
|
666d46de58 | ||
|
734f27122b | ||
|
d43db7fc88 | ||
|
e57b95f99e | ||
|
e21acf1341 | ||
|
7825dd7f77 | ||
|
cfbb4c9579 | ||
|
7200cbf58e | ||
|
aea9a40a73 | ||
|
d14ca3a141 | ||
|
88ef24788e | ||
|
4416419c9b | ||
|
2a2e504ebb | ||
|
7978c8277c | ||
|
28c4703bf4 | ||
|
6512b8205f | ||
|
9d5f835bea | ||
|
3ea926a908 | ||
|
8e9f9cf6df | ||
|
97c50b2957 | ||
|
af8804eb12 | ||
|
d3c85b1336 | ||
|
a5ee049692 | ||
|
2f75611662 | ||
|
b210fc8032 | ||
|
78fee22fec | ||
|
33be52792f | ||
|
bd5ebc2760 | ||
|
2abc5eb68c | ||
|
ced5bfd930 | ||
|
728213e3fd | ||
|
63b6150b9c | ||
|
d933c73343 | ||
|
2cca0b1e76 | ||
|
3d8da99eec | ||
|
eb53876af3 | ||
|
34ab188a99 | ||
|
1f61ad141e | ||
|
3a1b64d8f8 | ||
|
eb6809c015 | ||
|
ca899b2ac7 | ||
|
eead64ba49 | ||
|
870c4403e4 | ||
|
20fd57b793 | ||
|
bdbdacd68c | ||
|
1f904e88e1 | ||
|
a675cd5755 | ||
|
ad3e511328 | ||
|
d158fe8193 | ||
|
2d1536e44d | ||
|
002dc7a5f3 | ||
|
e96f059f65 | ||
|
964e880fc4 | ||
|
93abff5a4b | ||
|
4fcc66fd54 | ||
|
4c09e46de7 | ||
|
4e37916616 | ||
|
48ed439523 | ||
|
794bfd6e3b | ||
|
16292de8d3 | ||
|
21988e8528 | ||
|
4d35c931cd | ||
|
5cbec23d5e | ||
|
942323f81c | ||
|
ee2ad400ba | ||
|
00e61a8753 | ||
|
733fed76cc | ||
|
e78dd4b1c0 | ||
|
d479781fce | ||
|
c449b46bf4 | ||
|
fd6430c3e3 | ||
|
6f580ce2d9 | ||
|
2748133bd0 | ||
|
884b219508 | ||
|
162faf8339 | ||
|
c291ff05ee | ||
|
446981422e | ||
|
630e21b954 | ||
|
894ffb6c21 | ||
|
515dba02d3 | ||
|
0ceb9974f6 | ||
|
e0fcc0c876 |
@ -1,17 +0,0 @@
|
|||||||
version = 1
|
|
||||||
|
|
||||||
[[analyzers]]
|
|
||||||
name = "shell"
|
|
||||||
|
|
||||||
[[analyzers]]
|
|
||||||
name = "javascript"
|
|
||||||
|
|
||||||
[analyzers.meta]
|
|
||||||
plugins = ["react"]
|
|
||||||
environment = ["nodejs"]
|
|
||||||
|
|
||||||
[[analyzers]]
|
|
||||||
name = "python"
|
|
||||||
|
|
||||||
[analyzers.meta]
|
|
||||||
runtime_version = "3.x.x"
|
|
@ -2,5 +2,16 @@
|
|||||||
*.pyc
|
*.pyc
|
||||||
.env
|
.env
|
||||||
venv
|
venv
|
||||||
node_modules
|
node_modules/
|
||||||
|
**/node_modules/
|
||||||
npm-debug.log
|
npm-debug.log
|
||||||
|
.next/
|
||||||
|
**/.next/
|
||||||
|
.turbo/
|
||||||
|
**/.turbo/
|
||||||
|
build/
|
||||||
|
**/build/
|
||||||
|
out/
|
||||||
|
**/out/
|
||||||
|
dist/
|
||||||
|
**/dist/
|
20
.env.example
20
.env.example
@ -1,14 +1,12 @@
|
|||||||
# Database Settings
|
# Database Settings
|
||||||
PGUSER="plane"
|
POSTGRES_USER="plane"
|
||||||
PGPASSWORD="plane"
|
POSTGRES_PASSWORD="plane"
|
||||||
PGHOST="plane-db"
|
POSTGRES_DB="plane"
|
||||||
PGDATABASE="plane"
|
PGDATA="/var/lib/postgresql/data"
|
||||||
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
|
|
||||||
|
|
||||||
# Redis Settings
|
# Redis Settings
|
||||||
REDIS_HOST="plane-redis"
|
REDIS_HOST="plane-redis"
|
||||||
REDIS_PORT="6379"
|
REDIS_PORT="6379"
|
||||||
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
|
||||||
|
|
||||||
# AWS Settings
|
# AWS Settings
|
||||||
AWS_REGION=""
|
AWS_REGION=""
|
||||||
@ -21,15 +19,15 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# GPT settings
|
# GPT settings
|
||||||
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
|
||||||
OPENAI_API_KEY="sk-" # add your openai key here
|
OPENAI_API_KEY="sk-" # deprecated
|
||||||
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
GPT_ENGINE="gpt-3.5-turbo" # deprecated
|
||||||
|
|
||||||
# Settings related to Docker
|
# Settings related to Docker
|
||||||
DOCKERIZED=1
|
DOCKERIZED=1 # deprecated
|
||||||
|
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
# Nginx Configuration
|
# Nginx Configuration
|
||||||
NGINX_PORT=80
|
NGINX_PORT=80
|
||||||
|
|
||||||
|
5
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
5
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
@ -1,7 +1,8 @@
|
|||||||
name: Bug report
|
name: Bug report
|
||||||
description: Create a bug report to help us improve Plane
|
description: Create a bug report to help us improve Plane
|
||||||
title: "[bug]: "
|
title: "[bug]: "
|
||||||
labels: [bug, need testing]
|
labels: [🐛bug]
|
||||||
|
assignees: [srinivaspendem, pushya22]
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
@ -44,7 +45,7 @@ body:
|
|||||||
- Deploy preview
|
- Deploy preview
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
type: dropdown
|
- type: dropdown
|
||||||
id: browser
|
id: browser
|
||||||
attributes:
|
attributes:
|
||||||
label: Browser
|
label: Browser
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
name: Feature request
|
name: Feature request
|
||||||
description: Suggest a feature to improve Plane
|
description: Suggest a feature to improve Plane
|
||||||
title: "[feature]: "
|
title: "[feature]: "
|
||||||
labels: [feature]
|
labels: [✨feature]
|
||||||
|
assignees: [srinivaspendem, pushya22]
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
|
84
.github/workflows/auto-merge.yml
vendored
Normal file
84
.github/workflows/auto-merge.yml
vendored
Normal file
@ -0,0 +1,84 @@
|
|||||||
|
name: Auto Merge or Create PR on Push
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- "sync/**"
|
||||||
|
|
||||||
|
env:
|
||||||
|
CURRENT_BRANCH: ${{ github.ref_name }}
|
||||||
|
SOURCE_BRANCH: ${{ secrets.SYNC_SOURCE_BRANCH_NAME }} # The sync branch such as "sync/ce"
|
||||||
|
TARGET_BRANCH: ${{ secrets.SYNC_TARGET_BRANCH_NAME }} # The target branch that you would like to merge changes like develop
|
||||||
|
GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows
|
||||||
|
REVIEWER: ${{ secrets.SYNC_PR_REVIEWER }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
Check_Branch:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
BRANCH_MATCH: ${{ steps.check-branch.outputs.MATCH }}
|
||||||
|
steps:
|
||||||
|
- name: Check if current branch matches the secret
|
||||||
|
id: check-branch
|
||||||
|
run: |
|
||||||
|
if [ "$CURRENT_BRANCH" = "$SOURCE_BRANCH" ]; then
|
||||||
|
echo "MATCH=true" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "MATCH=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
Auto_Merge:
|
||||||
|
if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }}
|
||||||
|
needs: [Check_Branch]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4.1.1
|
||||||
|
with:
|
||||||
|
fetch-depth: 0 # Fetch all history for all branches and tags
|
||||||
|
|
||||||
|
- name: Setup Git
|
||||||
|
run: |
|
||||||
|
git config user.name "GitHub Actions"
|
||||||
|
git config user.email "actions@github.com"
|
||||||
|
|
||||||
|
- name: Setup GH CLI and Git Config
|
||||||
|
run: |
|
||||||
|
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
|
||||||
|
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||||
|
sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||||
|
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install gh -y
|
||||||
|
|
||||||
|
- name: Check for merge conflicts
|
||||||
|
id: conflicts
|
||||||
|
run: |
|
||||||
|
git fetch origin $TARGET_BRANCH
|
||||||
|
git checkout $TARGET_BRANCH
|
||||||
|
# Attempt to merge the main branch into the current branch
|
||||||
|
if $(git merge --no-commit --no-ff $SOURCE_BRANCH); then
|
||||||
|
echo "No merge conflicts detected."
|
||||||
|
echo "HAS_CONFLICTS=false" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "Merge conflicts detected."
|
||||||
|
echo "HAS_CONFLICTS=true" >> $GITHUB_ENV
|
||||||
|
git merge --abort
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Merge Change to Target Branch
|
||||||
|
if: env.HAS_CONFLICTS == 'false'
|
||||||
|
run: |
|
||||||
|
git commit -m "Merge branch '$SOURCE_BRANCH' into $TARGET_BRANCH"
|
||||||
|
git push origin $TARGET_BRANCH
|
||||||
|
|
||||||
|
- name: Create PR to Target Branch
|
||||||
|
if: env.HAS_CONFLICTS == 'true'
|
||||||
|
run: |
|
||||||
|
# Replace 'username' with the actual GitHub username of the reviewer.
|
||||||
|
PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: merge conflicts need to be resolved" --body "" --reviewer $REVIEWER)
|
||||||
|
echo "Pull Request created: $PR_URL"
|
310
.github/workflows/build-branch.yml
vendored
310
.github/workflows/build-branch.yml
vendored
@ -1,120 +1,123 @@
|
|||||||
|
|
||||||
name: Branch Build
|
name: Branch Build
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
workflow_dispatch:
|
||||||
types:
|
push:
|
||||||
- closed
|
|
||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
- release
|
- preview
|
||||||
- qa
|
release:
|
||||||
- develop
|
types: [released, prereleased]
|
||||||
|
|
||||||
env:
|
env:
|
||||||
TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
|
TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
branch_build_and_push:
|
branch_build_setup:
|
||||||
if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
|
|
||||||
name: Build-Push Web/Space/API/Proxy Docker Image
|
name: Build-Push Web/Space/API/Proxy Docker Image
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
|
||||||
|
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
|
||||||
|
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
|
||||||
|
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
|
||||||
|
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
|
||||||
|
build_frontend: ${{ steps.changed_files.outputs.frontend_any_changed }}
|
||||||
|
build_space: ${{ steps.changed_files.outputs.space_any_changed }}
|
||||||
|
build_backend: ${{ steps.changed_files.outputs.backend_any_changed }}
|
||||||
|
build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out the repo
|
- id: set_env_variables
|
||||||
uses: actions/checkout@v3.3.0
|
name: Set Environment Variables
|
||||||
|
run: |
|
||||||
|
if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
|
||||||
|
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
|
||||||
|
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
|
||||||
|
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
|
||||||
|
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
|
||||||
|
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
|
||||||
|
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
|
||||||
|
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
# - name: Set Target Branch Name on PR close
|
- id: checkout_files
|
||||||
# if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }}
|
name: Checkout Files
|
||||||
# run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
# - name: Set Target Branch Name on other than PR close
|
- name: Get changed files
|
||||||
# if: ${{ github.event_name == 'push' }}
|
id: changed_files
|
||||||
# run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV
|
uses: tj-actions/changed-files@v42
|
||||||
|
|
||||||
- uses: ASzc/change-string-case-action@v2
|
|
||||||
id: gh_branch_upper_lower
|
|
||||||
with:
|
with:
|
||||||
string: ${{env.TARGET_BRANCH}}
|
files_yaml: |
|
||||||
|
frontend:
|
||||||
- uses: mad9000/actions-find-and-replace-string@2
|
- web/**
|
||||||
id: gh_branch_replace_slash
|
- packages/**
|
||||||
with:
|
- 'package.json'
|
||||||
source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
|
- 'yarn.lock'
|
||||||
find: '/'
|
- 'tsconfig.json'
|
||||||
replace: '-'
|
- 'turbo.json'
|
||||||
|
space:
|
||||||
- uses: mad9000/actions-find-and-replace-string@2
|
- space/**
|
||||||
id: gh_branch_replace_dot
|
- packages/**
|
||||||
with:
|
- 'package.json'
|
||||||
source: ${{ steps.gh_branch_replace_slash.outputs.value }}
|
- 'yarn.lock'
|
||||||
find: '.'
|
- 'tsconfig.json'
|
||||||
replace: ''
|
- 'turbo.json'
|
||||||
|
backend:
|
||||||
- uses: mad9000/actions-find-and-replace-string@2
|
- apiserver/**
|
||||||
id: gh_branch_clean
|
proxy:
|
||||||
with:
|
- nginx/**
|
||||||
source: ${{ steps.gh_branch_replace_dot.outputs.value }}
|
|
||||||
find: '_'
|
|
||||||
replace: ''
|
|
||||||
- name: Uploading Proxy Source
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: proxy-src-code
|
|
||||||
path: ./nginx
|
|
||||||
- name: Uploading Backend Source
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: backend-src-code
|
|
||||||
path: ./apiserver
|
|
||||||
- name: Uploading Web Source
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: web-src-code
|
|
||||||
path: |
|
|
||||||
./
|
|
||||||
!./apiserver
|
|
||||||
!./nginx
|
|
||||||
!./deploy
|
|
||||||
!./space
|
|
||||||
|
|
||||||
- name: Uploading Space Source
|
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
|
||||||
name: space-src-code
|
|
||||||
path: |
|
|
||||||
./
|
|
||||||
!./apiserver
|
|
||||||
!./nginx
|
|
||||||
!./deploy
|
|
||||||
!./web
|
|
||||||
outputs:
|
|
||||||
gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
|
|
||||||
|
|
||||||
branch_build_push_frontend:
|
branch_build_push_frontend:
|
||||||
|
if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
needs: [ branch_build_and_push ]
|
needs: [branch_build_setup]
|
||||||
|
env:
|
||||||
|
FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||||
|
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||||
|
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||||
|
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Docker Buildx
|
- name: Set Frontend Docker Tag
|
||||||
uses: docker/setup-buildx-action@v2.5.0
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" == "release" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
|
||||||
|
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest
|
||||||
|
else
|
||||||
|
TAG=${{ env.FRONTEND_TAG }}
|
||||||
|
fi
|
||||||
|
echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v2.1.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
- name: Downloading Web Source Code
|
|
||||||
uses: actions/download-artifact@v3
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
with:
|
with:
|
||||||
name: web-src-code
|
driver: ${{ env.BUILDX_DRIVER }}
|
||||||
|
version: ${{ env.BUILDX_VERSION }}
|
||||||
|
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||||
|
|
||||||
|
- name: Check out the repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Build and Push Frontend to Docker Container Registry
|
- name: Build and Push Frontend to Docker Container Registry
|
||||||
uses: docker/build-push-action@v4.0.0
|
uses: docker/build-push-action@v5.1.0
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
file: ./web/Dockerfile.web
|
file: ./web/Dockerfile.web
|
||||||
platforms: linux/amd64
|
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||||
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
|
tags: ${{ env.FRONTEND_TAG }}
|
||||||
push: true
|
push: true
|
||||||
env:
|
env:
|
||||||
DOCKER_BUILDKIT: 1
|
DOCKER_BUILDKIT: 1
|
||||||
@ -122,29 +125,51 @@ jobs:
|
|||||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
branch_build_push_space:
|
branch_build_push_space:
|
||||||
|
if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
needs: [ branch_build_and_push ]
|
needs: [branch_build_setup]
|
||||||
|
env:
|
||||||
|
SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||||
|
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||||
|
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||||
|
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Docker Buildx
|
- name: Set Space Docker Tag
|
||||||
uses: docker/setup-buildx-action@v2.5.0
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" == "release" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
|
||||||
|
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
|
||||||
|
else
|
||||||
|
TAG=${{ env.SPACE_TAG }}
|
||||||
|
fi
|
||||||
|
echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v2.1.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
- name: Downloading Space Source Code
|
|
||||||
uses: actions/download-artifact@v3
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
with:
|
with:
|
||||||
name: space-src-code
|
driver: ${{ env.BUILDX_DRIVER }}
|
||||||
|
version: ${{ env.BUILDX_VERSION }}
|
||||||
|
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||||
|
|
||||||
|
- name: Check out the repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Build and Push Space to Docker Hub
|
- name: Build and Push Space to Docker Hub
|
||||||
uses: docker/build-push-action@v4.0.0
|
uses: docker/build-push-action@v5.1.0
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
file: ./space/Dockerfile.space
|
file: ./space/Dockerfile.space
|
||||||
platforms: linux/amd64
|
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||||
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
|
tags: ${{ env.SPACE_TAG }}
|
||||||
push: true
|
push: true
|
||||||
env:
|
env:
|
||||||
DOCKER_BUILDKIT: 1
|
DOCKER_BUILDKIT: 1
|
||||||
@ -152,60 +177,103 @@ jobs:
|
|||||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
branch_build_push_backend:
|
branch_build_push_backend:
|
||||||
|
if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
needs: [ branch_build_and_push ]
|
needs: [branch_build_setup]
|
||||||
|
env:
|
||||||
|
BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||||
|
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||||
|
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||||
|
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Docker Buildx
|
- name: Set Backend Docker Tag
|
||||||
uses: docker/setup-buildx-action@v2.5.0
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" == "release" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
|
||||||
|
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest
|
||||||
|
else
|
||||||
|
TAG=${{ env.BACKEND_TAG }}
|
||||||
|
fi
|
||||||
|
echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v2.1.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
- name: Downloading Backend Source Code
|
|
||||||
uses: actions/download-artifact@v3
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
with:
|
with:
|
||||||
name: backend-src-code
|
driver: ${{ env.BUILDX_DRIVER }}
|
||||||
|
version: ${{ env.BUILDX_VERSION }}
|
||||||
|
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||||
|
|
||||||
|
- name: Check out the repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Build and Push Backend to Docker Hub
|
- name: Build and Push Backend to Docker Hub
|
||||||
uses: docker/build-push-action@v4.0.0
|
uses: docker/build-push-action@v5.1.0
|
||||||
with:
|
with:
|
||||||
context: .
|
context: ./apiserver
|
||||||
file: ./Dockerfile.api
|
file: ./apiserver/Dockerfile.api
|
||||||
platforms: linux/amd64
|
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||||
push: true
|
push: true
|
||||||
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
|
tags: ${{ env.BACKEND_TAG }}
|
||||||
env:
|
env:
|
||||||
DOCKER_BUILDKIT: 1
|
DOCKER_BUILDKIT: 1
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
branch_build_push_proxy:
|
branch_build_push_proxy:
|
||||||
|
if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
needs: [ branch_build_and_push ]
|
needs: [branch_build_setup]
|
||||||
|
env:
|
||||||
|
PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
||||||
|
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
||||||
|
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
||||||
|
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
||||||
|
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Docker Buildx
|
- name: Set Proxy Docker Tag
|
||||||
uses: docker/setup-buildx-action@v2.5.0
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" == "release" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
|
||||||
|
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
||||||
|
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest
|
||||||
|
else
|
||||||
|
TAG=${{ env.PROXY_TAG }}
|
||||||
|
fi
|
||||||
|
echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v2.1.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Downloading Proxy Source Code
|
- name: Set up Docker Buildx
|
||||||
uses: actions/download-artifact@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
with:
|
with:
|
||||||
name: proxy-src-code
|
driver: ${{ env.BUILDX_DRIVER }}
|
||||||
|
version: ${{ env.BUILDX_VERSION }}
|
||||||
|
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
||||||
|
|
||||||
|
- name: Check out the repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Build and Push Plane-Proxy to Docker Hub
|
- name: Build and Push Plane-Proxy to Docker Hub
|
||||||
uses: docker/build-push-action@v4.0.0
|
uses: docker/build-push-action@v5.1.0
|
||||||
with:
|
with:
|
||||||
context: .
|
context: ./nginx
|
||||||
file: ./Dockerfile
|
file: ./nginx/Dockerfile
|
||||||
platforms: linux/amd64
|
platforms: ${{ env.BUILDX_PLATFORMS }}
|
||||||
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
|
tags: ${{ env.PROXY_TAG }}
|
||||||
push: true
|
push: true
|
||||||
env:
|
env:
|
||||||
DOCKER_BUILDKIT: 1
|
DOCKER_BUILDKIT: 1
|
||||||
|
110
.github/workflows/build-test-pull-request.yml
vendored
110
.github/workflows/build-test-pull-request.yml
vendored
@ -1,48 +1,104 @@
|
|||||||
name: Build Pull Request Contents
|
name: Build and Lint on Pull Request
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
pull_request:
|
pull_request:
|
||||||
types: ["opened", "synchronize"]
|
types: ["opened", "synchronize"]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-pull-request-contents:
|
get-changed-files:
|
||||||
name: Build Pull Request Contents
|
runs-on: ubuntu-latest
|
||||||
runs-on: ubuntu-20.04
|
outputs:
|
||||||
permissions:
|
apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
|
||||||
pull-requests: read
|
web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
|
||||||
|
space_changed: ${{ steps.changed-files.outputs.deploy_any_changed }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Repository to Actions
|
- uses: actions/checkout@v3
|
||||||
uses: actions/checkout@v3.3.0
|
|
||||||
|
|
||||||
- name: Setup Node.js 18.x
|
|
||||||
uses: actions/setup-node@v2
|
|
||||||
with:
|
|
||||||
node-version: 18.x
|
|
||||||
cache: 'yarn'
|
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@v38
|
uses: tj-actions/changed-files@v41
|
||||||
with:
|
with:
|
||||||
files_yaml: |
|
files_yaml: |
|
||||||
apiserver:
|
apiserver:
|
||||||
- apiserver/**
|
- apiserver/**
|
||||||
web:
|
web:
|
||||||
- web/**
|
- web/**
|
||||||
|
- packages/**
|
||||||
|
- 'package.json'
|
||||||
|
- 'yarn.lock'
|
||||||
|
- 'tsconfig.json'
|
||||||
|
- 'turbo.json'
|
||||||
deploy:
|
deploy:
|
||||||
- space/**
|
- space/**
|
||||||
|
- packages/**
|
||||||
|
- 'package.json'
|
||||||
|
- 'yarn.lock'
|
||||||
|
- 'tsconfig.json'
|
||||||
|
- 'turbo.json'
|
||||||
|
|
||||||
- name: Build Plane's Main App
|
lint-apiserver:
|
||||||
if: steps.changed-files.outputs.web_any_changed == 'true'
|
needs: get-changed-files
|
||||||
run: |
|
runs-on: ubuntu-latest
|
||||||
yarn
|
if: needs.get-changed-files.outputs.apiserver_changed == 'true'
|
||||||
yarn build --filter=web
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.x' # Specify the Python version you need
|
||||||
|
- name: Install Pylint
|
||||||
|
run: python -m pip install ruff
|
||||||
|
- name: Install Apiserver Dependencies
|
||||||
|
run: cd apiserver && pip install -r requirements.txt
|
||||||
|
- name: Lint apiserver
|
||||||
|
run: ruff check --fix apiserver
|
||||||
|
|
||||||
- name: Build Plane's Deploy App
|
lint-web:
|
||||||
if: steps.changed-files.outputs.deploy_any_changed == 'true'
|
needs: get-changed-files
|
||||||
run: |
|
if: needs.get-changed-files.outputs.web_changed == 'true'
|
||||||
yarn
|
runs-on: ubuntu-latest
|
||||||
yarn build --filter=space
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v2
|
||||||
|
with:
|
||||||
|
node-version: 18.x
|
||||||
|
- run: yarn install
|
||||||
|
- run: yarn lint --filter=web
|
||||||
|
|
||||||
|
lint-space:
|
||||||
|
needs: get-changed-files
|
||||||
|
if: needs.get-changed-files.outputs.space_changed == 'true'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v2
|
||||||
|
with:
|
||||||
|
node-version: 18.x
|
||||||
|
- run: yarn install
|
||||||
|
- run: yarn lint --filter=space
|
||||||
|
|
||||||
|
build-web:
|
||||||
|
needs: lint-web
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v2
|
||||||
|
with:
|
||||||
|
node-version: 18.x
|
||||||
|
- run: yarn install
|
||||||
|
- run: yarn build --filter=web
|
||||||
|
|
||||||
|
build-space:
|
||||||
|
needs: lint-space
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v2
|
||||||
|
with:
|
||||||
|
node-version: 18.x
|
||||||
|
- run: yarn install
|
||||||
|
- run: yarn build --filter=space
|
||||||
|
45
.github/workflows/check-version.yml
vendored
Normal file
45
.github/workflows/check-version.yml
vendored
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
name: Version Change Before Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-version:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ github.head_ref }}
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '18'
|
||||||
|
|
||||||
|
- name: Get PR Branch version
|
||||||
|
run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Fetch base branch
|
||||||
|
run: git fetch origin master:master
|
||||||
|
|
||||||
|
- name: Get Master Branch version
|
||||||
|
run: |
|
||||||
|
git checkout master
|
||||||
|
echo "MASTER_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Get master branch version and compare
|
||||||
|
run: |
|
||||||
|
echo "Comparing versions: PR version is $PR_VERSION, Master version is $MASTER_VERSION"
|
||||||
|
if [ "$PR_VERSION" == "$MASTER_VERSION" ]; then
|
||||||
|
echo "Version in PR branch is the same as in master. Failing the CI."
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
echo "Version check passed. Versions are different."
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
PR_VERSION: ${{ env.PR_VERSION }}
|
||||||
|
MASTER_VERSION: ${{ env.MASTER_VERSION }}
|
64
.github/workflows/codeql.yml
vendored
Normal file
64
.github/workflows/codeql.yml
vendored
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
name: "CodeQL"
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
branches: ["develop", "preview", "master"]
|
||||||
|
pull_request:
|
||||||
|
branches: ["develop", "preview", "master"]
|
||||||
|
schedule:
|
||||||
|
- cron: "53 19 * * 5"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analyze:
|
||||||
|
name: Analyze
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
actions: read
|
||||||
|
contents: read
|
||||||
|
security-events: write
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
language: ["python", "javascript"]
|
||||||
|
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||||
|
# Use only 'java' to analyze code written in Java, Kotlin or both
|
||||||
|
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
|
||||||
|
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
# Initializes the CodeQL tools for scanning.
|
||||||
|
- name: Initialize CodeQL
|
||||||
|
uses: github/codeql-action/init@v2
|
||||||
|
with:
|
||||||
|
languages: ${{ matrix.language }}
|
||||||
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
|
# By default, queries listed here will override any specified in a config file.
|
||||||
|
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||||
|
|
||||||
|
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||||
|
# queries: security-extended,security-and-quality
|
||||||
|
|
||||||
|
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
|
||||||
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
|
- name: Autobuild
|
||||||
|
uses: github/codeql-action/autobuild@v2
|
||||||
|
|
||||||
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||||
|
|
||||||
|
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||||
|
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||||
|
|
||||||
|
# - run: |
|
||||||
|
# echo "Run, Build Application using script"
|
||||||
|
# ./location_of_script_within_repo/buildscript.sh
|
||||||
|
|
||||||
|
- name: Perform CodeQL Analysis
|
||||||
|
uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "/language:${{matrix.language}}"
|
70
.github/workflows/create-sync-pr.yml
vendored
70
.github/workflows/create-sync-pr.yml
vendored
@ -1,42 +1,28 @@
|
|||||||
name: Create PR in Plane EE Repository to sync the changes
|
name: Create Sync Action
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
branches:
|
branches:
|
||||||
- master
|
- preview
|
||||||
types:
|
|
||||||
- closed
|
env:
|
||||||
|
SOURCE_BRANCH_NAME: ${{ github.ref_name }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
create_pr:
|
sync_changes:
|
||||||
# Only run the job when a PR is merged
|
|
||||||
if: github.event.pull_request.merged == true
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
contents: read
|
contents: read
|
||||||
steps:
|
steps:
|
||||||
- name: Check SOURCE_REPO
|
|
||||||
id: check_repo
|
|
||||||
env:
|
|
||||||
SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
|
|
||||||
run: |
|
|
||||||
echo "::set-output name=is_correct_repo::$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)"
|
|
||||||
|
|
||||||
- name: Checkout Code
|
- name: Checkout Code
|
||||||
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
uses: actions/checkout@v4.1.1
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Set up Branch Name
|
|
||||||
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
|
||||||
run: |
|
|
||||||
echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Setup GH CLI
|
- name: Setup GH CLI
|
||||||
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
|
||||||
run: |
|
run: |
|
||||||
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
|
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
|
||||||
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||||
@ -45,35 +31,25 @@ jobs:
|
|||||||
sudo apt update
|
sudo apt update
|
||||||
sudo apt install gh -y
|
sudo apt install gh -y
|
||||||
|
|
||||||
- name: Create Pull Request
|
- name: Push Changes to Target Repo A
|
||||||
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
|
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
|
TARGET_REPO="${{ secrets.TARGET_REPO_A }}"
|
||||||
TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
|
TARGET_BRANCH="${{ secrets.TARGET_REPO_A_BRANCH_NAME }}"
|
||||||
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
|
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
|
||||||
|
|
||||||
git checkout $SOURCE_BRANCH
|
git checkout $SOURCE_BRANCH
|
||||||
git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
|
git remote add target-origin-a "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
|
||||||
git push target $SOURCE_BRANCH:$SOURCE_BRANCH
|
git push target-origin-a $SOURCE_BRANCH:$TARGET_BRANCH
|
||||||
|
|
||||||
PR_TITLE="${{ github.event.pull_request.title }}"
|
- name: Push Changes to Target Repo B
|
||||||
PR_BODY="${{ github.event.pull_request.body }}"
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
|
||||||
|
run: |
|
||||||
|
TARGET_REPO="${{ secrets.TARGET_REPO_B }}"
|
||||||
|
TARGET_BRANCH="${{ secrets.TARGET_REPO_B_BRANCH_NAME }}"
|
||||||
|
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
|
||||||
|
|
||||||
# Remove double quotes
|
git remote add target-origin-b "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
|
||||||
PR_TITLE_CLEANED="${PR_TITLE//\"/}"
|
git push target-origin-b $SOURCE_BRANCH:$TARGET_BRANCH
|
||||||
PR_BODY_CLEANED="${PR_BODY//\"/}"
|
|
||||||
|
|
||||||
# Construct PR_BODY_CONTENT using a here-document
|
|
||||||
PR_BODY_CONTENT=$(cat <<EOF
|
|
||||||
$PR_BODY_CLEANED
|
|
||||||
EOF
|
|
||||||
)
|
|
||||||
|
|
||||||
gh pr create \
|
|
||||||
--base $TARGET_BRANCH \
|
|
||||||
--head $SOURCE_BRANCH \
|
|
||||||
--title "[SYNC] $PR_TITLE_CLEANED" \
|
|
||||||
--body "$PR_BODY_CONTENT" \
|
|
||||||
--repo $TARGET_REPO
|
|
||||||
|
199
.github/workflows/feature-deployment.yml
vendored
Normal file
199
.github/workflows/feature-deployment.yml
vendored
Normal file
@ -0,0 +1,199 @@
|
|||||||
|
name: Feature Preview
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
web-build:
|
||||||
|
required: false
|
||||||
|
description: 'Build Web'
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
space-build:
|
||||||
|
required: false
|
||||||
|
description: 'Build Space'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
|
||||||
|
env:
|
||||||
|
BUILD_WEB: ${{ github.event.inputs.web-build }}
|
||||||
|
BUILD_SPACE: ${{ github.event.inputs.space-build }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
setup-feature-build:
|
||||||
|
name: Feature Build Setup
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
run: |
|
||||||
|
echo "BUILD_WEB=$BUILD_WEB"
|
||||||
|
echo "BUILD_SPACE=$BUILD_SPACE"
|
||||||
|
outputs:
|
||||||
|
web-build: ${{ env.BUILD_WEB}}
|
||||||
|
space-build: ${{env.BUILD_SPACE}}
|
||||||
|
|
||||||
|
feature-build-web:
|
||||||
|
if: ${{ needs.setup-feature-build.outputs.web-build == 'true' }}
|
||||||
|
needs: setup-feature-build
|
||||||
|
name: Feature Build Web
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
|
||||||
|
NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
|
||||||
|
steps:
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '18'
|
||||||
|
- name: Install AWS cli
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y python3-pip
|
||||||
|
pip3 install awscli
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
path: plane
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
cd $GITHUB_WORKSPACE/plane
|
||||||
|
yarn install
|
||||||
|
- name: Build Web
|
||||||
|
id: build-web
|
||||||
|
run: |
|
||||||
|
cd $GITHUB_WORKSPACE/plane
|
||||||
|
yarn build --filter=web
|
||||||
|
cd $GITHUB_WORKSPACE
|
||||||
|
|
||||||
|
TAR_NAME="web.tar.gz"
|
||||||
|
tar -czf $TAR_NAME ./plane
|
||||||
|
|
||||||
|
FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
|
||||||
|
aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
|
||||||
|
|
||||||
|
feature-build-space:
|
||||||
|
if: ${{ needs.setup-feature-build.outputs.space-build == 'true' }}
|
||||||
|
needs: setup-feature-build
|
||||||
|
name: Feature Build Space
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
|
||||||
|
NEXT_PUBLIC_DEPLOY_WITH_NGINX: 1
|
||||||
|
NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
|
||||||
|
outputs:
|
||||||
|
do-build: ${{ needs.setup-feature-build.outputs.space-build }}
|
||||||
|
s3-url: ${{ steps.build-space.outputs.S3_PRESIGNED_URL }}
|
||||||
|
steps:
|
||||||
|
- name: Set up Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '18'
|
||||||
|
- name: Install AWS cli
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y python3-pip
|
||||||
|
pip3 install awscli
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
path: plane
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
cd $GITHUB_WORKSPACE/plane
|
||||||
|
yarn install
|
||||||
|
- name: Build Space
|
||||||
|
id: build-space
|
||||||
|
run: |
|
||||||
|
cd $GITHUB_WORKSPACE/plane
|
||||||
|
yarn build --filter=space
|
||||||
|
cd $GITHUB_WORKSPACE
|
||||||
|
|
||||||
|
TAR_NAME="space.tar.gz"
|
||||||
|
tar -czf $TAR_NAME ./plane
|
||||||
|
|
||||||
|
FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
|
||||||
|
aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
|
||||||
|
|
||||||
|
feature-deploy:
|
||||||
|
if: ${{ always() && (needs.setup-feature-build.outputs.web-build == 'true' || needs.setup-feature-build.outputs.space-build == 'true') }}
|
||||||
|
needs: [feature-build-web, feature-build-space]
|
||||||
|
name: Feature Deploy
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
|
||||||
|
KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
|
||||||
|
steps:
|
||||||
|
- name: Install AWS cli
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y python3-pip
|
||||||
|
pip3 install awscli
|
||||||
|
- name: Tailscale
|
||||||
|
uses: tailscale/github-action@v2
|
||||||
|
with:
|
||||||
|
oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
|
||||||
|
oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
|
||||||
|
tags: tag:ci
|
||||||
|
- name: Kubectl Setup
|
||||||
|
run: |
|
||||||
|
curl -LO "https://dl.k8s.io/release/${{ vars.FEATURE_PREVIEW_KUBE_VERSION }}/bin/linux/amd64/kubectl"
|
||||||
|
chmod +x kubectl
|
||||||
|
|
||||||
|
mkdir -p ~/.kube
|
||||||
|
echo "$KUBE_CONFIG_FILE" > ~/.kube/config
|
||||||
|
chmod 600 ~/.kube/config
|
||||||
|
- name: HELM Setup
|
||||||
|
run: |
|
||||||
|
curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
|
||||||
|
chmod 700 get_helm.sh
|
||||||
|
./get_helm.sh
|
||||||
|
- name: App Deploy
|
||||||
|
run: |
|
||||||
|
WEB_S3_URL=""
|
||||||
|
if [ ${{ env.BUILD_WEB }} == true ]; then
|
||||||
|
WEB_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/web.tar.gz --expires-in 3600)
|
||||||
|
fi
|
||||||
|
|
||||||
|
SPACE_S3_URL=""
|
||||||
|
if [ ${{ env.BUILD_SPACE }} == true ]; then
|
||||||
|
SPACE_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/space.tar.gz --expires-in 3600)
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ ${{ env.BUILD_WEB }} == true ] || [ ${{ env.BUILD_SPACE }} == true ]; then
|
||||||
|
|
||||||
|
helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
|
||||||
|
|
||||||
|
APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
|
||||||
|
DEPLOY_SCRIPT_URL="${{ vars.FEATURE_PREVIEW_DEPLOY_SCRIPT_URL }}"
|
||||||
|
|
||||||
|
METADATA=$(helm --kube-insecure-skip-tls-verify install feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} \
|
||||||
|
--generate-name \
|
||||||
|
--namespace $APP_NAMESPACE \
|
||||||
|
--set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
|
||||||
|
--set web.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
|
||||||
|
--set web.enabled=${{ env.BUILD_WEB || false }} \
|
||||||
|
--set web.artifact_url=$WEB_S3_URL \
|
||||||
|
--set space.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
|
||||||
|
--set space.enabled=${{ env.BUILD_SPACE || false }} \
|
||||||
|
--set space.artifact_url=$SPACE_S3_URL \
|
||||||
|
--set shared_config.deploy_script_url=$DEPLOY_SCRIPT_URL \
|
||||||
|
--set shared_config.api_base_url=${{vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL}} \
|
||||||
|
--output json \
|
||||||
|
--timeout 1000s)
|
||||||
|
|
||||||
|
APP_NAME=$(echo $METADATA | jq -r '.name')
|
||||||
|
|
||||||
|
INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \
|
||||||
|
-o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
|
||||||
|
jq -r '.spec.rules[0].host')
|
||||||
|
|
||||||
|
echo "****************************************"
|
||||||
|
echo "APP NAME ::: $APP_NAME"
|
||||||
|
echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
|
||||||
|
echo "****************************************"
|
||||||
|
fi
|
107
.github/workflows/update-docker-images.yml
vendored
107
.github/workflows/update-docker-images.yml
vendored
@ -1,107 +0,0 @@
|
|||||||
name: Update Docker Images for Plane on Release
|
|
||||||
|
|
||||||
on:
|
|
||||||
release:
|
|
||||||
types: [released, prereleased]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build_push_backend:
|
|
||||||
name: Build and Push Api Server Docker Image
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v3.3.0
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v2.5.0
|
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v2.1.0
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
|
||||||
id: metaFrontend
|
|
||||||
uses: docker/metadata-action@v4.3.0
|
|
||||||
with:
|
|
||||||
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend
|
|
||||||
tags: |
|
|
||||||
type=ref,event=tag
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
|
||||||
id: metaBackend
|
|
||||||
uses: docker/metadata-action@v4.3.0
|
|
||||||
with:
|
|
||||||
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend
|
|
||||||
tags: |
|
|
||||||
type=ref,event=tag
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
|
||||||
id: metaSpace
|
|
||||||
uses: docker/metadata-action@v4.3.0
|
|
||||||
with:
|
|
||||||
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
|
|
||||||
tags: |
|
|
||||||
type=ref,event=tag
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
|
||||||
id: metaProxy
|
|
||||||
uses: docker/metadata-action@v4.3.0
|
|
||||||
with:
|
|
||||||
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy
|
|
||||||
tags: |
|
|
||||||
type=ref,event=tag
|
|
||||||
|
|
||||||
- name: Build and Push Frontend to Docker Container Registry
|
|
||||||
uses: docker/build-push-action@v4.0.0
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ./web/Dockerfile.web
|
|
||||||
platforms: linux/amd64
|
|
||||||
tags: ${{ steps.metaFrontend.outputs.tags }}
|
|
||||||
push: true
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build and Push Backend to Docker Hub
|
|
||||||
uses: docker/build-push-action@v4.0.0
|
|
||||||
with:
|
|
||||||
context: ./apiserver
|
|
||||||
file: ./apiserver/Dockerfile.api
|
|
||||||
platforms: linux/amd64
|
|
||||||
push: true
|
|
||||||
tags: ${{ steps.metaBackend.outputs.tags }}
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build and Push Plane-Deploy to Docker Hub
|
|
||||||
uses: docker/build-push-action@v4.0.0
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ./space/Dockerfile.space
|
|
||||||
platforms: linux/amd64
|
|
||||||
push: true
|
|
||||||
tags: ${{ steps.metaSpace.outputs.tags }}
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build and Push Plane-Proxy to Docker Hub
|
|
||||||
uses: docker/build-push-action@v4.0.0
|
|
||||||
with:
|
|
||||||
context: ./nginx
|
|
||||||
file: ./nginx/Dockerfile
|
|
||||||
platforms: linux/amd64
|
|
||||||
push: true
|
|
||||||
tags: ${{ steps.metaProxy.outputs.tags }}
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
6
.gitignore
vendored
6
.gitignore
vendored
@ -1,3 +1,7 @@
|
|||||||
|
pg_data
|
||||||
|
redis_data
|
||||||
|
minio_data
|
||||||
|
|
||||||
node_modules
|
node_modules
|
||||||
.next
|
.next
|
||||||
|
|
||||||
@ -51,6 +55,7 @@ staticfiles
|
|||||||
mediafiles
|
mediafiles
|
||||||
.env
|
.env
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
logs/
|
||||||
|
|
||||||
node_modules/
|
node_modules/
|
||||||
assets/dist/
|
assets/dist/
|
||||||
@ -79,3 +84,4 @@ pnpm-workspace.yaml
|
|||||||
tmp/
|
tmp/
|
||||||
## packages
|
## packages
|
||||||
dist
|
dist
|
||||||
|
.temp/
|
||||||
|
@ -33,8 +33,8 @@ The backend is a django project which is kept inside apiserver
|
|||||||
1. Clone the repo
|
1. Clone the repo
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/makeplane/plane
|
git clone https://github.com/makeplane/plane.git [folder-name]
|
||||||
cd plane
|
cd [folder-name]
|
||||||
chmod +x setup.sh
|
chmod +x setup.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -44,32 +44,10 @@ chmod +x setup.sh
|
|||||||
./setup.sh
|
./setup.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in **web/.env** and **space/.env** file
|
3. Start the containers
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
|
docker compose -f docker-compose-local.yml up
|
||||||
```
|
|
||||||
|
|
||||||
```bash
|
|
||||||
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
|
|
||||||
```
|
|
||||||
|
|
||||||
4. Run Docker compose up
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
5. Install dependencies
|
|
||||||
|
|
||||||
```bash
|
|
||||||
yarn install
|
|
||||||
```
|
|
||||||
|
|
||||||
6. Run the web app in development mode
|
|
||||||
|
|
||||||
```bash
|
|
||||||
yarn dev
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Missing a Feature?
|
## Missing a Feature?
|
||||||
|
210
Dockerfile
210
Dockerfile
@ -1,132 +1,110 @@
|
|||||||
FROM node:18-alpine AS builder
|
FROM git.orionkindel.com/tpl/asdf:bookworm AS system
|
||||||
RUN apk add --no-cache libc6-compat
|
|
||||||
# Set working directory
|
|
||||||
WORKDIR /app
|
|
||||||
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
|
|
||||||
|
|
||||||
RUN yarn global add turbo
|
ARG S6_OVERLAY_VERSION=3.1.6.2
|
||||||
RUN apk add tree
|
|
||||||
COPY . .
|
|
||||||
|
|
||||||
RUN turbo prune --scope=app --scope=plane-deploy --docker
|
ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz /tmp
|
||||||
CMD tree -I node_modules/
|
RUN tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz
|
||||||
|
|
||||||
# Add lockfile and package.json's of isolated subworkspace
|
ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-x86_64.tar.xz /tmp
|
||||||
FROM node:18-alpine AS installer
|
RUN tar -C / -Jxpf /tmp/s6-overlay-x86_64.tar.xz
|
||||||
|
|
||||||
RUN apk add --no-cache libc6-compat
|
RUN apt-get update
|
||||||
WORKDIR /app
|
RUN apt-get install -y \
|
||||||
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
|
build-essential \
|
||||||
# First install the dependencies (as they change less often)
|
zlib1g-dev \
|
||||||
COPY .gitignore .gitignore
|
libncurses5-dev \
|
||||||
COPY --from=builder /app/out/json/ .
|
libgdbm-dev \
|
||||||
COPY --from=builder /app/out/yarn.lock ./yarn.lock
|
libnss3-dev \
|
||||||
RUN yarn install
|
libssl-dev \
|
||||||
|
libreadline-dev \
|
||||||
|
libffi-dev \
|
||||||
|
libsqlite3-dev \
|
||||||
|
wget \
|
||||||
|
libbz2-dev \
|
||||||
|
uuid-dev \
|
||||||
|
nginx \
|
||||||
|
procps
|
||||||
|
|
||||||
# # Build the project
|
RUN asdf plugin add nodejs \
|
||||||
COPY --from=builder /app/out/full/ .
|
&& asdf plugin add python \
|
||||||
COPY turbo.json turbo.json
|
&& asdf plugin add postgres
|
||||||
COPY replace-env-vars.sh /usr/local/bin/
|
|
||||||
USER root
|
|
||||||
RUN chmod +x /usr/local/bin/replace-env-vars.sh
|
|
||||||
|
|
||||||
RUN yarn turbo run build
|
RUN --mount=type=cache,target=/.asdf-build \
|
||||||
|
export ASDF_DOWNLOAD_PATH=/.asdf-build \
|
||||||
|
&& export TMPDIR=/.asdf-build \
|
||||||
|
&& export POSTGRES_SKIP_INITDB=y \
|
||||||
|
&& asdf install nodejs 20.9.0 \
|
||||||
|
&& asdf install python 3.11.1 \
|
||||||
|
&& asdf install postgres 15.3
|
||||||
|
|
||||||
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
|
RUN asdf global nodejs 20.9.0 \
|
||||||
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
&& asdf global postgres 15.3 \
|
||||||
|
&& asdf global python 3.11.1
|
||||||
|
|
||||||
RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}
|
RUN useradd -m postgres && passwd -d postgres
|
||||||
|
|
||||||
FROM python:3.11.1-alpine3.17 AS backend
|
ADD https://dl.min.io/server/minio/release/linux-amd64/minio /usr/bin
|
||||||
|
RUN chmod +x /usr/bin/minio
|
||||||
|
|
||||||
# set environment variables
|
RUN set -eo pipefail; \
|
||||||
ENV PYTHONDONTWRITEBYTECODE 1
|
curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg; \
|
||||||
ENV PYTHONUNBUFFERED 1
|
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb bookworm main" | tee /etc/apt/sources.list.d/redis.list; \
|
||||||
|
apt-get update; \
|
||||||
|
apt-get install -y redis
|
||||||
|
|
||||||
|
FROM system AS next_prebuild
|
||||||
|
|
||||||
|
RUN npm i -g yarn
|
||||||
|
RUN --mount=type=cache,target=/.yarn-cache \
|
||||||
|
yarn config set cache-folder /.yarn-cache
|
||||||
|
|
||||||
|
COPY package.json turbo.json yarn.lock app.json ./
|
||||||
|
COPY packages packages
|
||||||
|
COPY web web
|
||||||
|
COPY space space
|
||||||
|
|
||||||
|
RUN --mount=type=cache,target=/.yarn-cache \
|
||||||
|
yarn install
|
||||||
|
|
||||||
|
FROM next_prebuild AS next_build
|
||||||
|
|
||||||
|
RUN --mount=type=cache,target=/.yarn-cache \
|
||||||
|
--mount=type=cache,target=/web/.next \
|
||||||
|
--mount=type=cache,target=/space/.next \
|
||||||
|
yarn build && \
|
||||||
|
cp -R /web/.next /web/_next && \
|
||||||
|
cp -R /space/.next /space/_next
|
||||||
|
|
||||||
|
RUN mv /web/_next /web/.next && \
|
||||||
|
mv /space/_next /space/.next && \
|
||||||
|
cp -R /web/.next/standalone/web/* /web/ && \
|
||||||
|
cp -R /space/.next/standalone/space/* /space/
|
||||||
|
|
||||||
|
FROM next_build AS api_build
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE=1
|
||||||
|
ENV PYTHONUNBUFFERED=1
|
||||||
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
|
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
|
||||||
ENV DJANGO_SETTINGS_MODULE plane.settings.production
|
|
||||||
ENV DOCKERIZED 1
|
|
||||||
|
|
||||||
WORKDIR /code
|
COPY apiserver apiserver
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
|
cd /apiserver \
|
||||||
|
&& pip install -r requirements.txt --compile
|
||||||
|
|
||||||
RUN apk --no-cache add \
|
FROM api_build AS s6
|
||||||
"libpq~=15" \
|
|
||||||
"libxslt~=1.1" \
|
|
||||||
"nodejs-current~=19" \
|
|
||||||
"xmlsec~=1.2" \
|
|
||||||
"nginx" \
|
|
||||||
"nodejs" \
|
|
||||||
"npm" \
|
|
||||||
"supervisor"
|
|
||||||
|
|
||||||
COPY apiserver/requirements.txt ./
|
COPY docker/etc/ /etc/
|
||||||
COPY apiserver/requirements ./requirements
|
|
||||||
RUN apk add --no-cache libffi-dev
|
|
||||||
RUN apk add --no-cache --virtual .build-deps \
|
|
||||||
"bash~=5.2" \
|
|
||||||
"g++~=12.2" \
|
|
||||||
"gcc~=12.2" \
|
|
||||||
"cargo~=1.64" \
|
|
||||||
"git~=2" \
|
|
||||||
"make~=4.3" \
|
|
||||||
"postgresql13-dev~=13" \
|
|
||||||
"libc-dev" \
|
|
||||||
"linux-headers" \
|
|
||||||
&& \
|
|
||||||
pip install -r requirements.txt --compile --no-cache-dir \
|
|
||||||
&& \
|
|
||||||
apk del .build-deps
|
|
||||||
|
|
||||||
# Add in Django deps and generate Django's static files
|
RUN chmod -R 777 /root \
|
||||||
COPY apiserver/manage.py manage.py
|
&& chmod -R 777 /root/.asdf \
|
||||||
COPY apiserver/plane plane/
|
&& chmod -x /root/.asdf/lib/commands/* \
|
||||||
COPY apiserver/templates templates/
|
&& chmod -R 777 /apiserver \
|
||||||
|
&& chmod -R 777 /web \
|
||||||
|
&& chmod -R 777 /space \
|
||||||
|
&& ln $(asdf which postgres) /usr/bin/postgres \
|
||||||
|
&& ln $(asdf which initdb) /usr/bin/initdb \
|
||||||
|
&& ln $(asdf which node) /usr/bin/node \
|
||||||
|
&& ln $(asdf which npm) /usr/bin/npm \
|
||||||
|
&& ln $(asdf which python) /usr/bin/python
|
||||||
|
|
||||||
COPY apiserver/gunicorn.config.py ./
|
ENV S6_KEEP_ENV=1
|
||||||
RUN apk --no-cache add "bash~=5.2"
|
ENTRYPOINT ["/init"]
|
||||||
COPY apiserver/bin ./bin/
|
|
||||||
|
|
||||||
RUN chmod +x ./bin/takeoff ./bin/worker
|
|
||||||
RUN chmod -R 777 /code
|
|
||||||
|
|
||||||
# Expose container port and run entry point script
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# Don't run production as root
|
|
||||||
RUN addgroup --system --gid 1001 plane
|
|
||||||
RUN adduser --system --uid 1001 captain
|
|
||||||
|
|
||||||
COPY --from=installer /app/apps/app/next.config.js .
|
|
||||||
COPY --from=installer /app/apps/app/package.json .
|
|
||||||
COPY --from=installer /app/apps/space/next.config.js .
|
|
||||||
COPY --from=installer /app/apps/space/package.json .
|
|
||||||
|
|
||||||
COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
|
|
||||||
|
|
||||||
COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
|
|
||||||
|
|
||||||
COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
|
|
||||||
COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next
|
|
||||||
|
|
||||||
ENV NEXT_TELEMETRY_DISABLED 1
|
|
||||||
|
|
||||||
# RUN rm /etc/nginx/conf.d/default.conf
|
|
||||||
#######################################################################
|
|
||||||
COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf
|
|
||||||
#######################################################################
|
|
||||||
|
|
||||||
COPY nginx/supervisor.conf /code/supervisor.conf
|
|
||||||
|
|
||||||
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
|
|
||||||
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
|
|
||||||
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
|
||||||
|
|
||||||
USER root
|
|
||||||
COPY replace-env-vars.sh /usr/local/bin/
|
|
||||||
COPY start.sh /usr/local/bin/
|
|
||||||
RUN chmod +x /usr/local/bin/replace-env-vars.sh
|
|
||||||
RUN chmod +x /usr/local/bin/start.sh
|
|
||||||
|
|
||||||
EXPOSE 80
|
|
||||||
|
|
||||||
CMD ["supervisord","-c","/code/supervisor.conf"]
|
|
||||||
|
38
ENV_SETUP.md
38
ENV_SETUP.md
@ -31,12 +31,10 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# GPT settings
|
# GPT settings
|
||||||
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
|
||||||
OPENAI_API_KEY="sk-" # add your openai key here
|
OPENAI_API_KEY="sk-" # deprecated
|
||||||
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
GPT_ENGINE="gpt-3.5-turbo" # deprecated
|
||||||
|
|
||||||
# Settings related to Docker
|
|
||||||
DOCKERIZED=1
|
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
@ -51,25 +49,10 @@ NGINX_PORT=80
|
|||||||
|
|
||||||
|
|
||||||
```
|
```
|
||||||
# Enable/Disable OAUTH - default 0 for selfhosted instance
|
|
||||||
NEXT_PUBLIC_ENABLE_OAUTH=0
|
|
||||||
# Public boards deploy URL
|
# Public boards deploy URL
|
||||||
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
|
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## {PROJECT_FOLDER}/spaces/.env.example
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
```
|
|
||||||
# Flag to toggle OAuth
|
|
||||||
NEXT_PUBLIC_ENABLE_OAUTH=0
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## {PROJECT_FOLDER}/apiserver/.env
|
## {PROJECT_FOLDER}/apiserver/.env
|
||||||
|
|
||||||
|
|
||||||
@ -78,7 +61,6 @@ NEXT_PUBLIC_ENABLE_OAUTH=0
|
|||||||
# Backend
|
# Backend
|
||||||
# Debug value for api server use it as 0 for production use
|
# Debug value for api server use it as 0 for production use
|
||||||
DEBUG=0
|
DEBUG=0
|
||||||
DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
|
|
||||||
|
|
||||||
# Error logs
|
# Error logs
|
||||||
SENTRY_DSN=""
|
SENTRY_DSN=""
|
||||||
@ -115,24 +97,22 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# GPT settings
|
# GPT settings
|
||||||
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
|
||||||
OPENAI_API_KEY="sk-" # add your openai key here
|
OPENAI_API_KEY="sk-" # deprecated
|
||||||
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
GPT_ENGINE="gpt-3.5-turbo" # deprecated
|
||||||
|
|
||||||
|
# Settings related to Docker
|
||||||
|
DOCKERIZED=1 # Deprecated
|
||||||
|
|
||||||
# Github
|
# Github
|
||||||
GITHUB_CLIENT_SECRET="" # For fetching release notes
|
GITHUB_CLIENT_SECRET="" # For fetching release notes
|
||||||
|
|
||||||
# Settings related to Docker
|
|
||||||
DOCKERIZED=1
|
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
# Nginx Configuration
|
# Nginx Configuration
|
||||||
NGINX_PORT=80
|
NGINX_PORT=80
|
||||||
|
|
||||||
# Default Creds
|
|
||||||
DEFAULT_EMAIL="captain@plane.so"
|
|
||||||
DEFAULT_PASSWORD="password123"
|
|
||||||
|
|
||||||
# SignUps
|
# SignUps
|
||||||
ENABLE_SIGNUP="1"
|
ENABLE_SIGNUP="1"
|
||||||
|
159
README.md
159
README.md
@ -7,7 +7,7 @@
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
<h3 align="center"><b>Plane</b></h3>
|
<h3 align="center"><b>Plane</b></h3>
|
||||||
<p align="center"><b>Flexible, extensible open-source project management</b></p>
|
<p align="center"><b>Open-source project management that unlocks customer value.</b></p>
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://discord.com/invite/A92xrEGCge">
|
<a href="https://discord.com/invite/A92xrEGCge">
|
||||||
@ -16,6 +16,13 @@
|
|||||||
<img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
|
<img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<a href="https://dub.sh/plane-website-readme"><b>Website</b></a> •
|
||||||
|
<a href="https://git.new/releases"><b>Releases</b></a> •
|
||||||
|
<a href="https://dub.sh/planepowershq"><b>Twitter</b></a> •
|
||||||
|
<a href="https://dub.sh/planedocs"><b>Documentation</b></a>
|
||||||
|
</p>
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
|
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
|
||||||
<img
|
<img
|
||||||
@ -33,60 +40,90 @@
|
|||||||
</a>
|
</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘♀️.
|
Meet [Plane](https://dub.sh/plane-website-readme). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘♀️
|
||||||
|
|
||||||
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
|
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve in our upcoming releases.
|
||||||
|
|
||||||
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
|
## ⚡ Installation
|
||||||
|
|
||||||
## ⚡️ Contributors Quick Start
|
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account where we offer a hosted solution for users.
|
||||||
|
|
||||||
### Prerequisite
|
If you want more control over your data, prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
|
||||||
|
|
||||||
Development system must have docker engine installed and running.
|
| Installation Methods | Documentation Link |
|
||||||
|
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||||
|
| Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/self-hosting/methods/docker-compose) |
|
||||||
|
| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://docs.plane.so/kubernetes) |
|
||||||
|
|
||||||
### Steps
|
`Instance admin` can configure instance settings using our [God-mode](https://docs.plane.so/instance-admin) feature.
|
||||||
|
|
||||||
Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute
|
|
||||||
|
|
||||||
1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
|
|
||||||
1. Switch to the code folder `cd plane`
|
|
||||||
1. Create your feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
|
|
||||||
1. Open terminal and run `./setup.sh`
|
|
||||||
1. Open the code on VSCode or similar equivalent IDE
|
|
||||||
1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
|
|
||||||
1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
|
|
||||||
|
|
||||||
```bash
|
|
||||||
./setup.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
You are ready to make changes to the code. Do not forget to refresh the browser (in case id does not auto-reload)
|
|
||||||
|
|
||||||
Thats it!
|
|
||||||
|
|
||||||
## 🍙 Self Hosting
|
|
||||||
|
|
||||||
For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page
|
|
||||||
|
|
||||||
## 🚀 Features
|
## 🚀 Features
|
||||||
|
|
||||||
- **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
|
- **Issues**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
|
||||||
- **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
|
|
||||||
- **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
|
- **Cycles**:
|
||||||
- **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
|
Keep up your team's momentum with Cycles. Gain insights into your project's progress with burn-down charts and other valuable features.
|
||||||
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
|
|
||||||
|
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to track and plan your project's progress easily.
|
||||||
|
|
||||||
- **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
|
- **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
|
||||||
- **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
|
|
||||||
- **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
|
- **Pages**: Plane pages, equipped with AI and a rich text editor, let you jot down your thoughts on the fly. Format your text, upload images, hyperlink, or sync your existing ideas into an actionable item or issue.
|
||||||
- **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
|
|
||||||
|
- **Analytics**: Get insights into all your Plane data in real-time. Visualize issue data to spot trends, remove blockers, and progress your work.
|
||||||
|
|
||||||
|
- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
|
||||||
|
|
||||||
|
## 🛠️ Quick start for contributors
|
||||||
|
|
||||||
|
> Development system must have docker engine installed and running.
|
||||||
|
|
||||||
|
Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute -
|
||||||
|
|
||||||
|
1. Clone the code locally using:
|
||||||
|
```
|
||||||
|
git clone https://github.com/makeplane/plane.git
|
||||||
|
```
|
||||||
|
2. Switch to the code folder:
|
||||||
|
```
|
||||||
|
cd plane
|
||||||
|
```
|
||||||
|
3. Create your feature or fix branch you plan to work on using:
|
||||||
|
```
|
||||||
|
git checkout -b <feature-branch-name>
|
||||||
|
```
|
||||||
|
4. Open terminal and run:
|
||||||
|
```
|
||||||
|
./setup.sh
|
||||||
|
```
|
||||||
|
5. Open the code on VSCode or similar equivalent IDE.
|
||||||
|
6. Review the `.env` files available in various folders.
|
||||||
|
Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system.
|
||||||
|
7. Run the docker command to initiate services:
|
||||||
|
```
|
||||||
|
docker compose -f docker-compose-local.yml up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
|
||||||
|
|
||||||
|
Thats it!
|
||||||
|
|
||||||
|
## ❤️ Community
|
||||||
|
|
||||||
|
The Plane community can be found on [GitHub Discussions](https://github.com/orgs/makeplane/discussions), and our [Discord server](https://discord.com/invite/A92xrEGCge). Our [Code of conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community chanels.
|
||||||
|
|
||||||
|
Ask questions, report bugs, join discussions, voice ideas, make feature requests, or share your projects.
|
||||||
|
|
||||||
|
### Repo Activity
|
||||||
|
|
||||||
|
![Plane Repo Activity](https://repobeats.axiom.co/api/embed/2523c6ed2f77c082b7908c33e2ab208981d76c39.svg "Repobeats analytics image")
|
||||||
|
|
||||||
## 📸 Screenshots
|
## 📸 Screenshots
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_views_dark_mode.webp"
|
src="https://ik.imagekit.io/w2okwbtu2/Issues_rNZjrGgFl.png?updatedAt=1709298765880"
|
||||||
alt="Plane Views"
|
alt="Plane Views"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -95,8 +132,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_issue_detail_dark_mode.webp"
|
src="https://ik.imagekit.io/w2okwbtu2/Cycles_jCDhqmTl9.png?updatedAt=1709298780697"
|
||||||
alt="Plane Issue Details"
|
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
</a>
|
</a>
|
||||||
@ -104,7 +140,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_cycles_modules_dark_mode.webp"
|
src="https://ik.imagekit.io/w2okwbtu2/Modules_PSCVsbSfI.png?updatedAt=1709298796783"
|
||||||
alt="Plane Cycles and Modules"
|
alt="Plane Cycles and Modules"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -113,7 +149,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_analytics_dark_mode.webp"
|
src="https://ik.imagekit.io/w2okwbtu2/Views_uxXsRatS4.png?updatedAt=1709298834522"
|
||||||
alt="Plane Analytics"
|
alt="Plane Analytics"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -122,7 +158,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_pages_dark_mode.webp"
|
src="https://ik.imagekit.io/w2okwbtu2/Analytics_0o22gLRtp.png?updatedAt=1709298834389"
|
||||||
alt="Plane Pages"
|
alt="Plane Pages"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -132,7 +168,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_commad_k_dark_mode.webp"
|
src="https://ik.imagekit.io/w2okwbtu2/Drive_LlfeY4xn3.png?updatedAt=1709298837917"
|
||||||
alt="Plane Command Menu"
|
alt="Plane Command Menu"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -140,20 +176,23 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
|
|||||||
</p>
|
</p>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
## 📚Documentation
|
|
||||||
|
|
||||||
For full documentation, visit [docs.plane.so](https://docs.plane.so/)
|
|
||||||
|
|
||||||
To see how to Contribute, visit [here](https://github.com/makeplane/plane/blob/master/CONTRIBUTING.md).
|
|
||||||
|
|
||||||
## ❤️ Community
|
|
||||||
|
|
||||||
The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects.
|
|
||||||
|
|
||||||
To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge).
|
|
||||||
|
|
||||||
Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.
|
|
||||||
|
|
||||||
## ⛓️ Security
|
## ⛓️ Security
|
||||||
|
|
||||||
If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email engineering@plane.so to disclose any security vulnerabilities.
|
If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.
|
||||||
|
|
||||||
|
Email squawk@plane.so to disclose any security vulnerabilities.
|
||||||
|
|
||||||
|
## ❤️ Contribute
|
||||||
|
|
||||||
|
There are many ways to contribute to Plane, including:
|
||||||
|
|
||||||
|
- Submitting [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) and [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+) for various components.
|
||||||
|
- Reviewing [the documentation](https://docs.plane.so/) and submitting [pull requests](https://github.com/makeplane/plane), from fixing typos to adding new features.
|
||||||
|
- Speaking or writing about Plane or any other ecosystem integration and [letting us know](https://discord.com/invite/A92xrEGCge)!
|
||||||
|
- Upvoting [popular feature requests](https://github.com/makeplane/plane/issues) to show your support.
|
||||||
|
|
||||||
|
### We couldn't have done this without you.
|
||||||
|
|
||||||
|
<a href="https://github.com/makeplane/plane/graphs/contributors">
|
||||||
|
<img src="https://contrib.rocks/image?repo=makeplane/plane" />
|
||||||
|
</a>
|
||||||
|
44
SECURITY.md
Normal file
44
SECURITY.md
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
# Security Policy
|
||||||
|
|
||||||
|
This document outlines security procedures and vulnerabilities reporting for the Plane project.
|
||||||
|
|
||||||
|
At Plane, we safeguarding the security of our systems with top priority. Despite our efforts, vulnerabilities may still exist. We greatly appreciate your assistance in identifying and reporting any such vulnerabilities to help us maintain the integrity of our systems and protect our clients.
|
||||||
|
|
||||||
|
To report a security vulnerability, please email us directly at security@plane.so with a detailed description of the vulnerability and steps to reproduce it. Please refrain from disclosing the vulnerability publicly until we have had an opportunity to review and address it.
|
||||||
|
|
||||||
|
## Out of Scope Vulnerabilities
|
||||||
|
|
||||||
|
We appreciate your help in identifying vulnerabilities. However, please note that the following types of vulnerabilities are considered out of scope:
|
||||||
|
|
||||||
|
- Attacks requiring MITM or physical access to a user's device.
|
||||||
|
- Content spoofing and text injection issues without demonstrating an attack vector or ability to modify HTML/CSS.
|
||||||
|
- Email spoofing.
|
||||||
|
- Missing DNSSEC, CAA, CSP headers.
|
||||||
|
- Lack of Secure or HTTP only flag on non-sensitive cookies.
|
||||||
|
|
||||||
|
## Reporting Process
|
||||||
|
|
||||||
|
If you discover a vulnerability, please adhere to the following reporting process:
|
||||||
|
|
||||||
|
1. Email your findings to security@plane.so.
|
||||||
|
2. Refrain from running automated scanners on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
|
||||||
|
3. Do not exploit the vulnerability for malicious purposes, such as downloading excessive data or altering user data.
|
||||||
|
4. Maintain confidentiality and refrain from disclosing the vulnerability until it has been resolved.
|
||||||
|
5. Avoid using physical security attacks, social engineering, distributed denial of service, spam, or third-party applications.
|
||||||
|
|
||||||
|
When reporting a vulnerability, please provide sufficient information to allow us to reproduce and address the issue promptly. Include the IP address or URL of the affected system, along with a detailed description of the vulnerability.
|
||||||
|
|
||||||
|
## Our Commitment
|
||||||
|
|
||||||
|
We are committed to promptly addressing reported vulnerabilities and maintaining open communication throughout the resolution process. Here's what you can expect from us:
|
||||||
|
|
||||||
|
- **Response Time:** We will acknowledge receipt of your report within three business days and provide an expected resolution date.
|
||||||
|
- **Legal Protection:** We will not pursue legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
|
||||||
|
- **Confidentiality:** Your report will be treated with strict confidentiality. We will not disclose your personal information to third parties without your consent.
|
||||||
|
- **Progress Updates:** We will keep you informed of our progress in resolving the reported vulnerability.
|
||||||
|
- **Recognition:** With your permission, we will publicly acknowledge you as the discoverer of the vulnerability.
|
||||||
|
- **Timely Resolution:** We strive to resolve all reported vulnerabilities promptly and will actively participate in the publication process once the issue is resolved.
|
||||||
|
|
||||||
|
We appreciate your cooperation in helping us maintain the security of our systems and protecting our clients. Thank you for your contributions to our security efforts.
|
||||||
|
|
||||||
|
reference: https://supabase.com/.well-known/security.txt
|
@ -1,32 +1,25 @@
|
|||||||
# Backend
|
# Backend
|
||||||
# Debug value for api server use it as 0 for production use
|
# Debug value for api server use it as 0 for production use
|
||||||
DEBUG=0
|
DEBUG=0
|
||||||
DJANGO_SETTINGS_MODULE="plane.settings.production"
|
CORS_ALLOWED_ORIGINS=""
|
||||||
|
|
||||||
# Error logs
|
# Error logs
|
||||||
SENTRY_DSN=""
|
SENTRY_DSN=""
|
||||||
|
SENTRY_ENVIRONMENT="development"
|
||||||
|
|
||||||
# Database Settings
|
# Database Settings
|
||||||
PGUSER="plane"
|
POSTGRES_USER="plane"
|
||||||
PGPASSWORD="plane"
|
POSTGRES_PASSWORD="plane"
|
||||||
PGHOST="plane-db"
|
POSTGRES_HOST="plane-db"
|
||||||
PGDATABASE="plane"
|
POSTGRES_DB="plane"
|
||||||
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
|
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}/${POSTGRES_DB}
|
||||||
|
|
||||||
|
|
||||||
# Redis Settings
|
# Redis Settings
|
||||||
REDIS_HOST="plane-redis"
|
REDIS_HOST="plane-redis"
|
||||||
REDIS_PORT="6379"
|
REDIS_PORT="6379"
|
||||||
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
||||||
|
|
||||||
# Email Settings
|
|
||||||
EMAIL_HOST=""
|
|
||||||
EMAIL_HOST_USER=""
|
|
||||||
EMAIL_HOST_PASSWORD=""
|
|
||||||
EMAIL_PORT=587
|
|
||||||
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
|
|
||||||
EMAIL_USE_TLS="1"
|
|
||||||
EMAIL_USE_SSL="0"
|
|
||||||
|
|
||||||
# AWS Settings
|
# AWS Settings
|
||||||
AWS_REGION=""
|
AWS_REGION=""
|
||||||
AWS_ACCESS_KEY_ID="access-key"
|
AWS_ACCESS_KEY_ID="access-key"
|
||||||
@ -37,39 +30,17 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
# Maximum file upload limit
|
# Maximum file upload limit
|
||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# GPT settings
|
|
||||||
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
|
||||||
OPENAI_API_KEY="sk-" # add your openai key here
|
|
||||||
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
|
||||||
|
|
||||||
# Github
|
|
||||||
GITHUB_CLIENT_SECRET="" # For fetching release notes
|
|
||||||
|
|
||||||
# Settings related to Docker
|
# Settings related to Docker
|
||||||
DOCKERIZED=1
|
DOCKERIZED=1 # deprecated
|
||||||
|
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
# Nginx Configuration
|
# Nginx Configuration
|
||||||
NGINX_PORT=80
|
NGINX_PORT=80
|
||||||
|
|
||||||
# Default Creds
|
|
||||||
DEFAULT_EMAIL="captain@plane.so"
|
|
||||||
DEFAULT_PASSWORD="password123"
|
|
||||||
|
|
||||||
# SignUps
|
|
||||||
ENABLE_SIGNUP="1"
|
|
||||||
|
|
||||||
|
|
||||||
# Enable Email/Password Signup
|
|
||||||
ENABLE_EMAIL_PASSWORD="1"
|
|
||||||
|
|
||||||
# Enable Magic link Login
|
|
||||||
ENABLE_MAGIC_LINK_LOGIN="0"
|
|
||||||
|
|
||||||
# Email redirections and minio domain settings
|
# Email redirections and minio domain settings
|
||||||
WEB_URL="http://localhost"
|
WEB_URL="http://localhost"
|
||||||
|
|
||||||
|
|
||||||
# Gunicorn Workers
|
# Gunicorn Workers
|
||||||
GUNICORN_WORKERS=2
|
GUNICORN_WORKERS=2
|
||||||
|
@ -32,28 +32,19 @@ RUN apk add --no-cache --virtual .build-deps \
|
|||||||
apk del .build-deps
|
apk del .build-deps
|
||||||
|
|
||||||
|
|
||||||
RUN addgroup -S plane && \
|
|
||||||
adduser -S captain -G plane
|
|
||||||
|
|
||||||
RUN chown captain.plane /code
|
|
||||||
|
|
||||||
USER captain
|
|
||||||
|
|
||||||
# Add in Django deps and generate Django's static files
|
# Add in Django deps and generate Django's static files
|
||||||
COPY manage.py manage.py
|
COPY manage.py manage.py
|
||||||
COPY plane plane/
|
COPY plane plane/
|
||||||
COPY templates templates/
|
COPY templates templates/
|
||||||
|
COPY package.json package.json
|
||||||
|
|
||||||
COPY gunicorn.config.py ./
|
|
||||||
USER root
|
|
||||||
RUN apk --no-cache add "bash~=5.2"
|
RUN apk --no-cache add "bash~=5.2"
|
||||||
COPY ./bin ./bin/
|
COPY ./bin ./bin/
|
||||||
|
|
||||||
|
RUN mkdir -p /code/plane/logs
|
||||||
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
|
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
|
||||||
RUN chmod -R 777 /code
|
RUN chmod -R 777 /code
|
||||||
|
|
||||||
USER captain
|
|
||||||
|
|
||||||
# Expose container port and run entry point script
|
# Expose container port and run entry point script
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
|
@ -27,26 +27,19 @@ WORKDIR /code
|
|||||||
COPY requirements.txt ./requirements.txt
|
COPY requirements.txt ./requirements.txt
|
||||||
ADD requirements ./requirements
|
ADD requirements ./requirements
|
||||||
|
|
||||||
RUN pip install -r requirements.txt --compile --no-cache-dir
|
# Install the local development settings
|
||||||
|
RUN pip install -r requirements/local.txt --compile --no-cache-dir
|
||||||
|
|
||||||
RUN addgroup -S plane && \
|
|
||||||
adduser -S captain -G plane
|
|
||||||
|
|
||||||
RUN chown captain.plane /code
|
COPY . .
|
||||||
|
|
||||||
USER captain
|
RUN mkdir -p /code/plane/logs
|
||||||
|
RUN chmod -R +x /code/bin
|
||||||
# Add in Django deps and generate Django's static files
|
|
||||||
|
|
||||||
USER root
|
|
||||||
|
|
||||||
# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
|
|
||||||
RUN chmod -R 777 /code
|
RUN chmod -R 777 /code
|
||||||
|
|
||||||
USER captain
|
|
||||||
|
|
||||||
# Expose container port and run entry point script
|
# Expose container port and run entry point script
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
# CMD [ "./bin/takeoff" ]
|
CMD [ "./bin/takeoff.local" ]
|
||||||
|
|
||||||
|
@ -26,7 +26,9 @@ def update_description():
|
|||||||
updated_issues.append(issue)
|
updated_issues.append(issue)
|
||||||
|
|
||||||
Issue.objects.bulk_update(
|
Issue.objects.bulk_update(
|
||||||
updated_issues, ["description_html", "description_stripped"], batch_size=100
|
updated_issues,
|
||||||
|
["description_html", "description_stripped"],
|
||||||
|
batch_size=100,
|
||||||
)
|
)
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -40,7 +42,9 @@ def update_comments():
|
|||||||
updated_issue_comments = []
|
updated_issue_comments = []
|
||||||
|
|
||||||
for issue_comment in issue_comments:
|
for issue_comment in issue_comments:
|
||||||
issue_comment.comment_html = f"<p>{issue_comment.comment_stripped}</p>"
|
issue_comment.comment_html = (
|
||||||
|
f"<p>{issue_comment.comment_stripped}</p>"
|
||||||
|
)
|
||||||
updated_issue_comments.append(issue_comment)
|
updated_issue_comments.append(issue_comment)
|
||||||
|
|
||||||
IssueComment.objects.bulk_update(
|
IssueComment.objects.bulk_update(
|
||||||
@ -99,7 +103,9 @@ def updated_issue_sort_order():
|
|||||||
issue.sort_order = issue.sequence_id * random.randint(100, 500)
|
issue.sort_order = issue.sequence_id * random.randint(100, 500)
|
||||||
updated_issues.append(issue)
|
updated_issues.append(issue)
|
||||||
|
|
||||||
Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100)
|
Issue.objects.bulk_update(
|
||||||
|
updated_issues, ["sort_order"], batch_size=100
|
||||||
|
)
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
@ -137,7 +143,9 @@ def update_project_cover_images():
|
|||||||
project.cover_image = project_cover_images[random.randint(0, 19)]
|
project.cover_image = project_cover_images[random.randint(0, 19)]
|
||||||
updated_projects.append(project)
|
updated_projects.append(project)
|
||||||
|
|
||||||
Project.objects.bulk_update(updated_projects, ["cover_image"], batch_size=100)
|
Project.objects.bulk_update(
|
||||||
|
updated_projects, ["cover_image"], batch_size=100
|
||||||
|
)
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
@ -174,7 +182,7 @@ def update_label_color():
|
|||||||
labels = Label.objects.filter(color="")
|
labels = Label.objects.filter(color="")
|
||||||
updated_labels = []
|
updated_labels = []
|
||||||
for label in labels:
|
for label in labels:
|
||||||
label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
|
label.color = f"#{random.randint(0, 0xFFFFFF+1):06X}"
|
||||||
updated_labels.append(label)
|
updated_labels.append(label)
|
||||||
|
|
||||||
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
|
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
|
||||||
@ -186,7 +194,9 @@ def update_label_color():
|
|||||||
|
|
||||||
def create_slack_integration():
|
def create_slack_integration():
|
||||||
try:
|
try:
|
||||||
_ = Integration.objects.create(provider="slack", network=2, title="Slack")
|
_ = Integration.objects.create(
|
||||||
|
provider="slack", network=2, title="Slack"
|
||||||
|
)
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
@ -212,12 +222,16 @@ def update_integration_verified():
|
|||||||
|
|
||||||
def update_start_date():
|
def update_start_date():
|
||||||
try:
|
try:
|
||||||
issues = Issue.objects.filter(state__group__in=["started", "completed"])
|
issues = Issue.objects.filter(
|
||||||
|
state__group__in=["started", "completed"]
|
||||||
|
)
|
||||||
updated_issues = []
|
updated_issues = []
|
||||||
for issue in issues:
|
for issue in issues:
|
||||||
issue.start_date = issue.created_at.date()
|
issue.start_date = issue.created_at.date()
|
||||||
updated_issues.append(issue)
|
updated_issues.append(issue)
|
||||||
Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500)
|
Issue.objects.bulk_update(
|
||||||
|
updated_issues, ["start_date"], batch_size=500
|
||||||
|
)
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
|
3
apiserver/bin/beat
Normal file → Executable file
3
apiserver/bin/beat
Normal file → Executable file
@ -2,4 +2,7 @@
|
|||||||
set -e
|
set -e
|
||||||
|
|
||||||
python manage.py wait_for_db
|
python manage.py wait_for_db
|
||||||
|
# Wait for migrations
|
||||||
|
python manage.py wait_for_migrations
|
||||||
|
# Run the processes
|
||||||
celery -A plane beat -l info
|
celery -A plane beat -l info
|
@ -1,9 +1,35 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
set -e
|
set -e
|
||||||
python manage.py wait_for_db
|
python manage.py wait_for_db
|
||||||
python manage.py migrate
|
# Wait for migrations
|
||||||
|
python manage.py wait_for_migrations
|
||||||
|
|
||||||
# Create a Default User
|
# Create the default bucket
|
||||||
python bin/user_script.py
|
#!/bin/bash
|
||||||
|
|
||||||
exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
|
# Collect system information
|
||||||
|
HOSTNAME=$(hostname)
|
||||||
|
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
|
||||||
|
CPU_INFO=$(cat /proc/cpuinfo)
|
||||||
|
MEMORY_INFO=$(free -h)
|
||||||
|
DISK_INFO=$(df -h)
|
||||||
|
|
||||||
|
# Concatenate information and compute SHA-256 hash
|
||||||
|
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
|
||||||
|
|
||||||
|
# Export the variables
|
||||||
|
export MACHINE_SIGNATURE=$SIGNATURE
|
||||||
|
|
||||||
|
# Register instance
|
||||||
|
python manage.py register_instance "$MACHINE_SIGNATURE"
|
||||||
|
|
||||||
|
# Load the configuration variable
|
||||||
|
python manage.py configure_instance
|
||||||
|
|
||||||
|
# Create the default bucket
|
||||||
|
python manage.py create_bucket
|
||||||
|
|
||||||
|
# Clear Cache before starting to remove stale values
|
||||||
|
python manage.py clear_cache
|
||||||
|
|
||||||
|
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
|
||||||
|
35
apiserver/bin/takeoff.local
Executable file
35
apiserver/bin/takeoff.local
Executable file
@ -0,0 +1,35 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
python manage.py wait_for_db
|
||||||
|
# Wait for migrations
|
||||||
|
python manage.py wait_for_migrations
|
||||||
|
|
||||||
|
# Create the default bucket
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Collect system information
|
||||||
|
HOSTNAME=$(hostname)
|
||||||
|
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
|
||||||
|
CPU_INFO=$(cat /proc/cpuinfo)
|
||||||
|
MEMORY_INFO=$(free -h)
|
||||||
|
DISK_INFO=$(df -h)
|
||||||
|
|
||||||
|
# Concatenate information and compute SHA-256 hash
|
||||||
|
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
|
||||||
|
|
||||||
|
# Export the variables
|
||||||
|
export MACHINE_SIGNATURE=$SIGNATURE
|
||||||
|
|
||||||
|
# Register instance
|
||||||
|
python manage.py register_instance "$MACHINE_SIGNATURE"
|
||||||
|
# Load the configuration variable
|
||||||
|
python manage.py configure_instance
|
||||||
|
|
||||||
|
# Create the default bucket
|
||||||
|
python manage.py create_bucket
|
||||||
|
|
||||||
|
# Clear Cache before starting to remove stale values
|
||||||
|
python manage.py clear_cache
|
||||||
|
|
||||||
|
python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local
|
||||||
|
|
@ -1,28 +0,0 @@
|
|||||||
import os, sys
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
sys.path.append("/code")
|
|
||||||
|
|
||||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
|
|
||||||
import django
|
|
||||||
|
|
||||||
django.setup()
|
|
||||||
|
|
||||||
from plane.db.models import User
|
|
||||||
|
|
||||||
|
|
||||||
def populate():
|
|
||||||
default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
|
|
||||||
default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
|
|
||||||
|
|
||||||
if not User.objects.filter(email=default_email).exists():
|
|
||||||
user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
|
|
||||||
user.set_password(default_password)
|
|
||||||
user.save()
|
|
||||||
print(f"User created with an email: {default_email}")
|
|
||||||
else:
|
|
||||||
print(f"User already exists with the default email: {default_email}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
populate()
|
|
@ -2,4 +2,7 @@
|
|||||||
set -e
|
set -e
|
||||||
|
|
||||||
python manage.py wait_for_db
|
python manage.py wait_for_db
|
||||||
|
# Wait for migrations
|
||||||
|
python manage.py wait_for_migrations
|
||||||
|
# Run the processes
|
||||||
celery -A plane worker -l info
|
celery -A plane worker -l info
|
@ -1,6 +0,0 @@
|
|||||||
from psycogreen.gevent import patch_psycopg
|
|
||||||
|
|
||||||
|
|
||||||
def post_fork(server, worker):
|
|
||||||
patch_psycopg()
|
|
||||||
worker.log.info("Made Psycopg2 Green")
|
|
@ -2,10 +2,10 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == "__main__":
|
||||||
os.environ.setdefault(
|
os.environ.setdefault(
|
||||||
'DJANGO_SETTINGS_MODULE',
|
"DJANGO_SETTINGS_MODULE", "plane.settings.production"
|
||||||
'plane.settings.production')
|
)
|
||||||
try:
|
try:
|
||||||
from django.core.management import execute_from_command_line
|
from django.core.management import execute_from_command_line
|
||||||
except ImportError as exc:
|
except ImportError as exc:
|
||||||
|
4
apiserver/package.json
Normal file
4
apiserver/package.json
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"name": "plane-api",
|
||||||
|
"version": "0.17.0"
|
||||||
|
}
|
@ -1,3 +1,3 @@
|
|||||||
from .celery import app as celery_app
|
from .celery import app as celery_app
|
||||||
|
|
||||||
__all__ = ('celery_app',)
|
__all__ = ("celery_app",)
|
||||||
|
@ -2,4 +2,4 @@ from django.apps import AppConfig
|
|||||||
|
|
||||||
|
|
||||||
class AnalyticsConfig(AppConfig):
|
class AnalyticsConfig(AppConfig):
|
||||||
name = 'plane.analytics'
|
name = "plane.analytics"
|
||||||
|
50
apiserver/plane/api/middleware/api_authentication.py
Normal file
50
apiserver/plane/api/middleware/api_authentication.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
# Django imports
|
||||||
|
from django.utils import timezone
|
||||||
|
from django.db.models import Q
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from rest_framework import authentication
|
||||||
|
from rest_framework.exceptions import AuthenticationFailed
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from plane.db.models import APIToken
|
||||||
|
|
||||||
|
|
||||||
|
class APIKeyAuthentication(authentication.BaseAuthentication):
|
||||||
|
"""
|
||||||
|
Authentication with an API Key
|
||||||
|
"""
|
||||||
|
|
||||||
|
www_authenticate_realm = "api"
|
||||||
|
media_type = "application/json"
|
||||||
|
auth_header_name = "X-Api-Key"
|
||||||
|
|
||||||
|
def get_api_token(self, request):
|
||||||
|
return request.headers.get(self.auth_header_name)
|
||||||
|
|
||||||
|
def validate_api_token(self, token):
|
||||||
|
try:
|
||||||
|
api_token = APIToken.objects.get(
|
||||||
|
Q(
|
||||||
|
Q(expired_at__gt=timezone.now())
|
||||||
|
| Q(expired_at__isnull=True)
|
||||||
|
),
|
||||||
|
token=token,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
except APIToken.DoesNotExist:
|
||||||
|
raise AuthenticationFailed("Given API token is not valid")
|
||||||
|
|
||||||
|
# save api token last used
|
||||||
|
api_token.last_used = timezone.now()
|
||||||
|
api_token.save(update_fields=["last_used"])
|
||||||
|
return (api_token.user, api_token.token)
|
||||||
|
|
||||||
|
def authenticate(self, request):
|
||||||
|
token = self.get_api_token(request=request)
|
||||||
|
if not token:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Validate the API token
|
||||||
|
user, token = self.validate_api_token(token)
|
||||||
|
return user, token
|
@ -1,2 +0,0 @@
|
|||||||
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
|
|
||||||
from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission
|
|
42
apiserver/plane/api/rate_limit.py
Normal file
42
apiserver/plane/api/rate_limit.py
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
from rest_framework.throttling import SimpleRateThrottle
|
||||||
|
|
||||||
|
|
||||||
|
class ApiKeyRateThrottle(SimpleRateThrottle):
|
||||||
|
scope = "api_key"
|
||||||
|
rate = "60/minute"
|
||||||
|
|
||||||
|
def get_cache_key(self, request, view):
|
||||||
|
# Retrieve the API key from the request header
|
||||||
|
api_key = request.headers.get("X-Api-Key")
|
||||||
|
if not api_key:
|
||||||
|
return None # Allow the request if there's no API key
|
||||||
|
|
||||||
|
# Use the API key as part of the cache key
|
||||||
|
return f"{self.scope}:{api_key}"
|
||||||
|
|
||||||
|
def allow_request(self, request, view):
|
||||||
|
allowed = super().allow_request(request, view)
|
||||||
|
|
||||||
|
if allowed:
|
||||||
|
now = self.timer()
|
||||||
|
# Calculate the remaining limit and reset time
|
||||||
|
history = self.cache.get(self.key, [])
|
||||||
|
|
||||||
|
# Remove old histories
|
||||||
|
while history and history[-1] <= now - self.duration:
|
||||||
|
history.pop()
|
||||||
|
|
||||||
|
# Calculate the requests
|
||||||
|
num_requests = len(history)
|
||||||
|
|
||||||
|
# Check available requests
|
||||||
|
available = self.num_requests - num_requests
|
||||||
|
|
||||||
|
# Unix timestamp for when the rate limit will reset
|
||||||
|
reset_time = int(now + self.duration)
|
||||||
|
|
||||||
|
# Add headers
|
||||||
|
request.META["X-RateLimit-Remaining"] = max(0, available)
|
||||||
|
request.META["X-RateLimit-Reset"] = reset_time
|
||||||
|
|
||||||
|
return allowed
|
@ -1,102 +1,21 @@
|
|||||||
from .base import BaseSerializer
|
from .user import UserLiteSerializer
|
||||||
from .user import (
|
from .workspace import WorkspaceLiteSerializer
|
||||||
UserSerializer,
|
from .project import ProjectSerializer, ProjectLiteSerializer
|
||||||
UserLiteSerializer,
|
|
||||||
ChangePasswordSerializer,
|
|
||||||
ResetPasswordSerializer,
|
|
||||||
UserAdminLiteSerializer,
|
|
||||||
UserMeSerializer,
|
|
||||||
UserMeSettingsSerializer,
|
|
||||||
)
|
|
||||||
from .workspace import (
|
|
||||||
WorkSpaceSerializer,
|
|
||||||
WorkSpaceMemberSerializer,
|
|
||||||
TeamSerializer,
|
|
||||||
WorkSpaceMemberInviteSerializer,
|
|
||||||
WorkspaceLiteSerializer,
|
|
||||||
WorkspaceThemeSerializer,
|
|
||||||
WorkspaceMemberAdminSerializer,
|
|
||||||
WorkspaceMemberMeSerializer,
|
|
||||||
)
|
|
||||||
from .project import (
|
|
||||||
ProjectSerializer,
|
|
||||||
ProjectListSerializer,
|
|
||||||
ProjectDetailSerializer,
|
|
||||||
ProjectMemberSerializer,
|
|
||||||
ProjectMemberInviteSerializer,
|
|
||||||
ProjectIdentifierSerializer,
|
|
||||||
ProjectFavoriteSerializer,
|
|
||||||
ProjectLiteSerializer,
|
|
||||||
ProjectMemberLiteSerializer,
|
|
||||||
ProjectDeployBoardSerializer,
|
|
||||||
ProjectMemberAdminSerializer,
|
|
||||||
ProjectPublicMemberSerializer,
|
|
||||||
)
|
|
||||||
from .state import StateSerializer, StateLiteSerializer
|
|
||||||
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
|
|
||||||
from .cycle import (
|
|
||||||
CycleSerializer,
|
|
||||||
CycleIssueSerializer,
|
|
||||||
CycleFavoriteSerializer,
|
|
||||||
CycleWriteSerializer,
|
|
||||||
)
|
|
||||||
from .asset import FileAssetSerializer
|
|
||||||
from .issue import (
|
from .issue import (
|
||||||
IssueCreateSerializer,
|
|
||||||
IssueActivitySerializer,
|
|
||||||
IssueCommentSerializer,
|
|
||||||
IssuePropertySerializer,
|
|
||||||
IssueAssigneeSerializer,
|
|
||||||
LabelSerializer,
|
|
||||||
IssueSerializer,
|
IssueSerializer,
|
||||||
IssueFlatSerializer,
|
LabelSerializer,
|
||||||
IssueStateSerializer,
|
|
||||||
IssueLinkSerializer,
|
IssueLinkSerializer,
|
||||||
IssueLiteSerializer,
|
|
||||||
IssueAttachmentSerializer,
|
IssueAttachmentSerializer,
|
||||||
IssueSubscriberSerializer,
|
IssueCommentSerializer,
|
||||||
IssueReactionSerializer,
|
IssueAttachmentSerializer,
|
||||||
CommentReactionSerializer,
|
IssueActivitySerializer,
|
||||||
IssueVoteSerializer,
|
IssueExpandSerializer,
|
||||||
IssueRelationSerializer,
|
|
||||||
RelatedIssueSerializer,
|
|
||||||
IssuePublicSerializer,
|
|
||||||
)
|
)
|
||||||
|
from .state import StateLiteSerializer, StateSerializer
|
||||||
|
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
|
||||||
from .module import (
|
from .module import (
|
||||||
ModuleWriteSerializer,
|
|
||||||
ModuleSerializer,
|
ModuleSerializer,
|
||||||
ModuleIssueSerializer,
|
ModuleIssueSerializer,
|
||||||
ModuleLinkSerializer,
|
ModuleLiteSerializer,
|
||||||
ModuleFavoriteSerializer,
|
|
||||||
)
|
)
|
||||||
|
from .inbox import InboxIssueSerializer
|
||||||
from .api_token import APITokenSerializer
|
|
||||||
|
|
||||||
from .integration import (
|
|
||||||
IntegrationSerializer,
|
|
||||||
WorkspaceIntegrationSerializer,
|
|
||||||
GithubIssueSyncSerializer,
|
|
||||||
GithubRepositorySerializer,
|
|
||||||
GithubRepositorySyncSerializer,
|
|
||||||
GithubCommentSyncSerializer,
|
|
||||||
SlackProjectSyncSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .importer import ImporterSerializer
|
|
||||||
|
|
||||||
from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
|
|
||||||
|
|
||||||
from .estimate import (
|
|
||||||
EstimateSerializer,
|
|
||||||
EstimatePointSerializer,
|
|
||||||
EstimateReadSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
|
|
||||||
|
|
||||||
from .analytic import AnalyticViewSerializer
|
|
||||||
|
|
||||||
from .notification import NotificationSerializer
|
|
||||||
|
|
||||||
from .exporter import ExporterHistorySerializer
|
|
||||||
|
@ -1,14 +0,0 @@
|
|||||||
from .base import BaseSerializer
|
|
||||||
from plane.db.models import APIToken
|
|
||||||
|
|
||||||
|
|
||||||
class APITokenSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = APIToken
|
|
||||||
fields = [
|
|
||||||
"label",
|
|
||||||
"user",
|
|
||||||
"user_type",
|
|
||||||
"workspace",
|
|
||||||
"created_at",
|
|
||||||
]
|
|
@ -1,22 +1,22 @@
|
|||||||
|
# Third party imports
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
|
||||||
class BaseSerializer(serializers.ModelSerializer):
|
class BaseSerializer(serializers.ModelSerializer):
|
||||||
id = serializers.PrimaryKeyRelatedField(read_only=True)
|
id = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||||
|
|
||||||
class DynamicBaseSerializer(BaseSerializer):
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
# If 'fields' is provided in the arguments, remove it and store it separately.
|
# If 'fields' is provided in the arguments, remove it and store it separately.
|
||||||
# This is done so as not to pass this custom argument up to the superclass.
|
# This is done so as not to pass this custom argument up to the superclass.
|
||||||
fields = kwargs.pop("fields", None)
|
fields = kwargs.pop("fields", [])
|
||||||
|
self.expand = kwargs.pop("expand", []) or []
|
||||||
|
|
||||||
# Call the initialization of the superclass.
|
# Call the initialization of the superclass.
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
# If 'fields' was provided, filter the fields of the serializer accordingly.
|
# If 'fields' was provided, filter the fields of the serializer accordingly.
|
||||||
if fields is not None:
|
if fields:
|
||||||
self.fields = self._filter_fields(fields)
|
self.fields = self._filter_fields(fields=fields)
|
||||||
|
|
||||||
def _filter_fields(self, fields):
|
def _filter_fields(self, fields):
|
||||||
"""
|
"""
|
||||||
@ -52,7 +52,56 @@ class DynamicBaseSerializer(BaseSerializer):
|
|||||||
allowed = set(allowed)
|
allowed = set(allowed)
|
||||||
|
|
||||||
# Remove fields from the serializer that aren't in the 'allowed' list.
|
# Remove fields from the serializer that aren't in the 'allowed' list.
|
||||||
for field_name in (existing - allowed):
|
for field_name in existing - allowed:
|
||||||
self.fields.pop(field_name)
|
self.fields.pop(field_name)
|
||||||
|
|
||||||
return self.fields
|
return self.fields
|
||||||
|
|
||||||
|
def to_representation(self, instance):
|
||||||
|
response = super().to_representation(instance)
|
||||||
|
|
||||||
|
# Ensure 'expand' is iterable before processing
|
||||||
|
if self.expand:
|
||||||
|
for expand in self.expand:
|
||||||
|
if expand in self.fields:
|
||||||
|
# Import all the expandable serializers
|
||||||
|
from . import (
|
||||||
|
IssueSerializer,
|
||||||
|
ProjectLiteSerializer,
|
||||||
|
StateLiteSerializer,
|
||||||
|
UserLiteSerializer,
|
||||||
|
WorkspaceLiteSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Expansion mapper
|
||||||
|
expansion = {
|
||||||
|
"user": UserLiteSerializer,
|
||||||
|
"workspace": WorkspaceLiteSerializer,
|
||||||
|
"project": ProjectLiteSerializer,
|
||||||
|
"default_assignee": UserLiteSerializer,
|
||||||
|
"project_lead": UserLiteSerializer,
|
||||||
|
"state": StateLiteSerializer,
|
||||||
|
"created_by": UserLiteSerializer,
|
||||||
|
"issue": IssueSerializer,
|
||||||
|
"actor": UserLiteSerializer,
|
||||||
|
"owned_by": UserLiteSerializer,
|
||||||
|
"members": UserLiteSerializer,
|
||||||
|
}
|
||||||
|
# Check if field in expansion then expand the field
|
||||||
|
if expand in expansion:
|
||||||
|
if isinstance(response.get(expand), list):
|
||||||
|
exp_serializer = expansion[expand](
|
||||||
|
getattr(instance, expand), many=True
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
exp_serializer = expansion[expand](
|
||||||
|
getattr(instance, expand)
|
||||||
|
)
|
||||||
|
response[expand] = exp_serializer.data
|
||||||
|
else:
|
||||||
|
# You might need to handle this case differently
|
||||||
|
response[expand] = getattr(
|
||||||
|
instance, f"{expand}_id", None
|
||||||
|
)
|
||||||
|
|
||||||
|
return response
|
||||||
|
@ -3,43 +3,19 @@ from rest_framework import serializers
|
|||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from .user import UserLiteSerializer
|
from plane.db.models import Cycle, CycleIssue
|
||||||
from .issue import IssueStateSerializer
|
|
||||||
from .workspace import WorkspaceLiteSerializer
|
|
||||||
from .project import ProjectLiteSerializer
|
|
||||||
from plane.db.models import Cycle, CycleIssue, CycleFavorite
|
|
||||||
|
|
||||||
|
|
||||||
class CycleWriteSerializer(BaseSerializer):
|
|
||||||
def validate(self, data):
|
|
||||||
if (
|
|
||||||
data.get("start_date", None) is not None
|
|
||||||
and data.get("end_date", None) is not None
|
|
||||||
and data.get("start_date", None) > data.get("end_date", None)
|
|
||||||
):
|
|
||||||
raise serializers.ValidationError("Start date cannot exceed end date")
|
|
||||||
return data
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Cycle
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class CycleSerializer(BaseSerializer):
|
class CycleSerializer(BaseSerializer):
|
||||||
owned_by = UserLiteSerializer(read_only=True)
|
|
||||||
is_favorite = serializers.BooleanField(read_only=True)
|
|
||||||
total_issues = serializers.IntegerField(read_only=True)
|
total_issues = serializers.IntegerField(read_only=True)
|
||||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||||
completed_issues = serializers.IntegerField(read_only=True)
|
completed_issues = serializers.IntegerField(read_only=True)
|
||||||
started_issues = serializers.IntegerField(read_only=True)
|
started_issues = serializers.IntegerField(read_only=True)
|
||||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||||
backlog_issues = serializers.IntegerField(read_only=True)
|
backlog_issues = serializers.IntegerField(read_only=True)
|
||||||
assignees = serializers.SerializerMethodField(read_only=True)
|
|
||||||
total_estimates = serializers.IntegerField(read_only=True)
|
total_estimates = serializers.IntegerField(read_only=True)
|
||||||
completed_estimates = serializers.IntegerField(read_only=True)
|
completed_estimates = serializers.IntegerField(read_only=True)
|
||||||
started_estimates = serializers.IntegerField(read_only=True)
|
started_estimates = serializers.IntegerField(read_only=True)
|
||||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
if (
|
if (
|
||||||
@ -47,33 +23,20 @@ class CycleSerializer(BaseSerializer):
|
|||||||
and data.get("end_date", None) is not None
|
and data.get("end_date", None) is not None
|
||||||
and data.get("start_date", None) > data.get("end_date", None)
|
and data.get("start_date", None) > data.get("end_date", None)
|
||||||
):
|
):
|
||||||
raise serializers.ValidationError("Start date cannot exceed end date")
|
raise serializers.ValidationError(
|
||||||
|
"Start date cannot exceed end date"
|
||||||
|
)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def get_assignees(self, obj):
|
|
||||||
members = [
|
|
||||||
{
|
|
||||||
"avatar": assignee.avatar,
|
|
||||||
"display_name": assignee.display_name,
|
|
||||||
"id": assignee.id,
|
|
||||||
}
|
|
||||||
for issue_cycle in obj.issue_cycle.prefetch_related(
|
|
||||||
"issue__assignees"
|
|
||||||
).all()
|
|
||||||
for assignee in issue_cycle.issue.assignees.all()
|
|
||||||
]
|
|
||||||
# Use a set comprehension to return only the unique objects
|
|
||||||
unique_objects = {frozenset(item.items()) for item in members}
|
|
||||||
|
|
||||||
# Convert the set back to a list of dictionaries
|
|
||||||
unique_list = [dict(item) for item in unique_objects]
|
|
||||||
|
|
||||||
return unique_list
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Cycle
|
model = Cycle
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"owned_by",
|
"owned_by",
|
||||||
@ -81,7 +44,6 @@ class CycleSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class CycleIssueSerializer(BaseSerializer):
|
class CycleIssueSerializer(BaseSerializer):
|
||||||
issue_detail = IssueStateSerializer(read_only=True, source="issue")
|
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -94,14 +56,7 @@ class CycleIssueSerializer(BaseSerializer):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class CycleFavoriteSerializer(BaseSerializer):
|
class CycleLiteSerializer(BaseSerializer):
|
||||||
cycle_detail = CycleSerializer(source="cycle", read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = CycleFavorite
|
model = Cycle
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"user",
|
|
||||||
]
|
|
||||||
|
@ -1,57 +1,19 @@
|
|||||||
# Third party frameworks
|
# Module improts
|
||||||
from rest_framework import serializers
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from .issue import IssueFlatSerializer, LabelLiteSerializer
|
from plane.db.models import InboxIssue
|
||||||
from .project import ProjectLiteSerializer
|
|
||||||
from .state import StateLiteSerializer
|
|
||||||
from .user import UserLiteSerializer
|
|
||||||
from plane.db.models import Inbox, InboxIssue, Issue
|
|
||||||
|
|
||||||
|
|
||||||
class InboxSerializer(BaseSerializer):
|
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
|
||||||
pending_issue_count = serializers.IntegerField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Inbox
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"project",
|
|
||||||
"workspace",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class InboxIssueSerializer(BaseSerializer):
|
class InboxIssueSerializer(BaseSerializer):
|
||||||
issue_detail = IssueFlatSerializer(source="issue", read_only=True)
|
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = InboxIssue
|
model = InboxIssue
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"project",
|
"id",
|
||||||
"workspace",
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class InboxIssueLiteSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = InboxIssue
|
|
||||||
fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
|
|
||||||
read_only_fields = fields
|
|
||||||
|
|
||||||
|
|
||||||
class IssueStateInboxSerializer(BaseSerializer):
|
|
||||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
|
||||||
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
|
||||||
bridge_id = serializers.UUIDField(read_only=True)
|
|
||||||
issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Issue
|
|
||||||
fields = "__all__"
|
|
||||||
|
@ -1,8 +0,0 @@
|
|||||||
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
|
|
||||||
from .github import (
|
|
||||||
GithubRepositorySerializer,
|
|
||||||
GithubRepositorySyncSerializer,
|
|
||||||
GithubIssueSyncSerializer,
|
|
||||||
GithubCommentSyncSerializer,
|
|
||||||
)
|
|
||||||
from .slack import SlackProjectSyncSerializer
|
|
@ -1,20 +0,0 @@
|
|||||||
# Module imports
|
|
||||||
from plane.api.serializers import BaseSerializer
|
|
||||||
from plane.db.models import Integration, WorkspaceIntegration
|
|
||||||
|
|
||||||
|
|
||||||
class IntegrationSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = Integration
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"verified",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceIntegrationSerializer(BaseSerializer):
|
|
||||||
integration_detail = IntegrationSerializer(read_only=True, source="integration")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = WorkspaceIntegration
|
|
||||||
fields = "__all__"
|
|
@ -1,45 +0,0 @@
|
|||||||
# Module imports
|
|
||||||
from plane.api.serializers import BaseSerializer
|
|
||||||
from plane.db.models import (
|
|
||||||
GithubIssueSync,
|
|
||||||
GithubRepository,
|
|
||||||
GithubRepositorySync,
|
|
||||||
GithubCommentSync,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class GithubRepositorySerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = GithubRepository
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class GithubRepositorySyncSerializer(BaseSerializer):
|
|
||||||
repo_detail = GithubRepositorySerializer(source="repository")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = GithubRepositorySync
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class GithubIssueSyncSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = GithubIssueSync
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"project",
|
|
||||||
"workspace",
|
|
||||||
"repository_sync",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class GithubCommentSyncSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = GithubCommentSync
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"project",
|
|
||||||
"workspace",
|
|
||||||
"repository_sync",
|
|
||||||
"issue_sync",
|
|
||||||
]
|
|
@ -1,14 +0,0 @@
|
|||||||
# Module imports
|
|
||||||
from plane.api.serializers import BaseSerializer
|
|
||||||
from plane.db.models import SlackProjectSync
|
|
||||||
|
|
||||||
|
|
||||||
class SlackProjectSyncSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = SlackProjectSync
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"project",
|
|
||||||
"workspace",
|
|
||||||
"workspace_integration",
|
|
||||||
]
|
|
@ -1,95 +1,56 @@
|
|||||||
|
from django.core.exceptions import ValidationError
|
||||||
|
from django.core.validators import URLValidator
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
|
from lxml import html
|
||||||
|
|
||||||
# Third Party imports
|
# Third party imports
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
|
||||||
from .user import UserLiteSerializer
|
|
||||||
from .state import StateSerializer, StateLiteSerializer
|
|
||||||
from .project import ProjectLiteSerializer
|
|
||||||
from .workspace import WorkspaceLiteSerializer
|
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
User,
|
|
||||||
Issue,
|
Issue,
|
||||||
IssueActivity,
|
IssueActivity,
|
||||||
IssueComment,
|
|
||||||
IssueProperty,
|
|
||||||
IssueAssignee,
|
IssueAssignee,
|
||||||
IssueSubscriber,
|
|
||||||
IssueLabel,
|
|
||||||
Label,
|
|
||||||
CycleIssue,
|
|
||||||
Cycle,
|
|
||||||
Module,
|
|
||||||
ModuleIssue,
|
|
||||||
IssueLink,
|
|
||||||
IssueAttachment,
|
IssueAttachment,
|
||||||
IssueReaction,
|
IssueComment,
|
||||||
CommentReaction,
|
IssueLabel,
|
||||||
IssueVote,
|
IssueLink,
|
||||||
IssueRelation,
|
Label,
|
||||||
|
ProjectMember,
|
||||||
|
State,
|
||||||
|
User,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from .base import BaseSerializer
|
||||||
class IssueFlatSerializer(BaseSerializer):
|
from .cycle import CycleLiteSerializer, CycleSerializer
|
||||||
## Contain only flat fields
|
from .module import ModuleLiteSerializer, ModuleSerializer
|
||||||
|
from .state import StateLiteSerializer
|
||||||
class Meta:
|
from .user import UserLiteSerializer
|
||||||
model = Issue
|
|
||||||
fields = [
|
|
||||||
"id",
|
|
||||||
"name",
|
|
||||||
"description",
|
|
||||||
"description_html",
|
|
||||||
"priority",
|
|
||||||
"start_date",
|
|
||||||
"target_date",
|
|
||||||
"sequence_id",
|
|
||||||
"sort_order",
|
|
||||||
"is_draft",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueProjectLiteSerializer(BaseSerializer):
|
class IssueSerializer(BaseSerializer):
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Issue
|
|
||||||
fields = [
|
|
||||||
"id",
|
|
||||||
"project_detail",
|
|
||||||
"name",
|
|
||||||
"sequence_id",
|
|
||||||
]
|
|
||||||
read_only_fields = fields
|
|
||||||
|
|
||||||
|
|
||||||
##TODO: Find a better way to write this serializer
|
|
||||||
## Find a better approach to save manytomany?
|
|
||||||
class IssueCreateSerializer(BaseSerializer):
|
|
||||||
state_detail = StateSerializer(read_only=True, source="state")
|
|
||||||
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
|
||||||
|
|
||||||
assignees = serializers.ListField(
|
assignees = serializers.ListField(
|
||||||
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
|
child=serializers.PrimaryKeyRelatedField(
|
||||||
|
queryset=User.objects.values_list("id", flat=True)
|
||||||
|
),
|
||||||
write_only=True,
|
write_only=True,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
labels = serializers.ListField(
|
labels = serializers.ListField(
|
||||||
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
|
child=serializers.PrimaryKeyRelatedField(
|
||||||
|
queryset=Label.objects.values_list("id", flat=True)
|
||||||
|
),
|
||||||
write_only=True,
|
write_only=True,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Issue
|
model = Issue
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"created_by",
|
"created_by",
|
||||||
@ -97,12 +58,10 @@ class IssueCreateSerializer(BaseSerializer):
|
|||||||
"created_at",
|
"created_at",
|
||||||
"updated_at",
|
"updated_at",
|
||||||
]
|
]
|
||||||
|
exclude = [
|
||||||
def to_representation(self, instance):
|
"description",
|
||||||
data = super().to_representation(instance)
|
"description_stripped",
|
||||||
data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
|
]
|
||||||
data['labels'] = [str(label.id) for label in instance.labels.all()]
|
|
||||||
return data
|
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
if (
|
if (
|
||||||
@ -110,7 +69,58 @@ class IssueCreateSerializer(BaseSerializer):
|
|||||||
and data.get("target_date", None) is not None
|
and data.get("target_date", None) is not None
|
||||||
and data.get("start_date", None) > data.get("target_date", None)
|
and data.get("start_date", None) > data.get("target_date", None)
|
||||||
):
|
):
|
||||||
raise serializers.ValidationError("Start date cannot exceed target date")
|
raise serializers.ValidationError(
|
||||||
|
"Start date cannot exceed target date"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if data.get("description_html", None) is not None:
|
||||||
|
parsed = html.fromstring(data["description_html"])
|
||||||
|
parsed_str = html.tostring(parsed, encoding="unicode")
|
||||||
|
data["description_html"] = parsed_str
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
raise serializers.ValidationError("Invalid HTML passed")
|
||||||
|
|
||||||
|
# Validate assignees are from project
|
||||||
|
if data.get("assignees", []):
|
||||||
|
data["assignees"] = ProjectMember.objects.filter(
|
||||||
|
project_id=self.context.get("project_id"),
|
||||||
|
is_active=True,
|
||||||
|
member_id__in=data["assignees"],
|
||||||
|
).values_list("member_id", flat=True)
|
||||||
|
|
||||||
|
# Validate labels are from project
|
||||||
|
if data.get("labels", []):
|
||||||
|
data["labels"] = Label.objects.filter(
|
||||||
|
project_id=self.context.get("project_id"),
|
||||||
|
id__in=data["labels"],
|
||||||
|
).values_list("id", flat=True)
|
||||||
|
|
||||||
|
# Check state is from the project only else raise validation error
|
||||||
|
if (
|
||||||
|
data.get("state")
|
||||||
|
and not State.objects.filter(
|
||||||
|
project_id=self.context.get("project_id"),
|
||||||
|
pk=data.get("state").id,
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"State is not valid please pass a valid state_id"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check parent issue is from workspace as it can be cross workspace
|
||||||
|
if (
|
||||||
|
data.get("parent")
|
||||||
|
and not Issue.objects.filter(
|
||||||
|
workspace_id=self.context.get("workspace_id"),
|
||||||
|
pk=data.get("parent").id,
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"Parent is not valid issue_id please pass a valid issue_id"
|
||||||
|
)
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
@ -131,14 +141,14 @@ class IssueCreateSerializer(BaseSerializer):
|
|||||||
IssueAssignee.objects.bulk_create(
|
IssueAssignee.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueAssignee(
|
IssueAssignee(
|
||||||
assignee=user,
|
assignee_id=assignee_id,
|
||||||
issue=issue,
|
issue=issue,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for user in assignees
|
for assignee_id in assignees
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -158,14 +168,14 @@ class IssueCreateSerializer(BaseSerializer):
|
|||||||
IssueLabel.objects.bulk_create(
|
IssueLabel.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueLabel(
|
IssueLabel(
|
||||||
label=label,
|
label_id=label_id,
|
||||||
issue=issue,
|
issue=issue,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for label in labels
|
for label_id in labels
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -187,14 +197,14 @@ class IssueCreateSerializer(BaseSerializer):
|
|||||||
IssueAssignee.objects.bulk_create(
|
IssueAssignee.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueAssignee(
|
IssueAssignee(
|
||||||
assignee=user,
|
assignee_id=assignee_id,
|
||||||
issue=instance,
|
issue=instance,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for user in assignees
|
for assignee_id in assignees
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -204,14 +214,14 @@ class IssueCreateSerializer(BaseSerializer):
|
|||||||
IssueLabel.objects.bulk_create(
|
IssueLabel.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueLabel(
|
IssueLabel(
|
||||||
label=label,
|
label_id=label_id,
|
||||||
issue=instance,
|
issue=instance,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for label in labels
|
for label_id in labels
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -220,39 +230,171 @@ class IssueCreateSerializer(BaseSerializer):
|
|||||||
instance.updated_at = timezone.now()
|
instance.updated_at = timezone.now()
|
||||||
return super().update(instance, validated_data)
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
|
def to_representation(self, instance):
|
||||||
|
data = super().to_representation(instance)
|
||||||
|
if "assignees" in self.fields:
|
||||||
|
if "assignees" in self.expand:
|
||||||
|
from .user import UserLiteSerializer
|
||||||
|
|
||||||
class IssueActivitySerializer(BaseSerializer):
|
data["assignees"] = UserLiteSerializer(
|
||||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
instance.assignees.all(), many=True
|
||||||
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
).data
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
else:
|
||||||
|
data["assignees"] = [
|
||||||
|
str(assignee.id) for assignee in instance.assignees.all()
|
||||||
|
]
|
||||||
|
if "labels" in self.fields:
|
||||||
|
if "labels" in self.expand:
|
||||||
|
data["labels"] = LabelSerializer(
|
||||||
|
instance.labels.all(), many=True
|
||||||
|
).data
|
||||||
|
else:
|
||||||
|
data["labels"] = [
|
||||||
|
str(label.id) for label in instance.labels.all()
|
||||||
|
]
|
||||||
|
|
||||||
class Meta:
|
return data
|
||||||
model = IssueActivity
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class IssuePropertySerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = IssueProperty
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"user",
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class LabelSerializer(BaseSerializer):
|
class LabelSerializer(BaseSerializer):
|
||||||
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Label
|
model = Label
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueLinkSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = IssueLink
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
def validate_url(self, value):
|
||||||
|
# Check URL format
|
||||||
|
validate_url = URLValidator()
|
||||||
|
try:
|
||||||
|
validate_url(value)
|
||||||
|
except ValidationError:
|
||||||
|
raise serializers.ValidationError("Invalid URL format.")
|
||||||
|
|
||||||
|
# Check URL scheme
|
||||||
|
if not value.startswith(("http://", "https://")):
|
||||||
|
raise serializers.ValidationError("Invalid URL scheme.")
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
# Validation if url already exists
|
||||||
|
def create(self, validated_data):
|
||||||
|
if IssueLink.objects.filter(
|
||||||
|
url=validated_data.get("url"),
|
||||||
|
issue_id=validated_data.get("issue_id"),
|
||||||
|
).exists():
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
{"error": "URL already exists for this Issue"}
|
||||||
|
)
|
||||||
|
return IssueLink.objects.create(**validated_data)
|
||||||
|
|
||||||
|
def update(self, instance, validated_data):
|
||||||
|
if IssueLink.objects.filter(
|
||||||
|
url=validated_data.get("url"),
|
||||||
|
issue_id=instance.issue_id,
|
||||||
|
).exists():
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
{"error": "URL already exists for this Issue"}
|
||||||
|
)
|
||||||
|
|
||||||
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
|
|
||||||
|
class IssueAttachmentSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = IssueAttachment
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueCommentSerializer(BaseSerializer):
|
||||||
|
is_member = serializers.BooleanField(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueComment
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
exclude = [
|
||||||
|
"comment_stripped",
|
||||||
|
"comment_json",
|
||||||
|
]
|
||||||
|
|
||||||
|
def validate(self, data):
|
||||||
|
try:
|
||||||
|
if data.get("comment_html", None) is not None:
|
||||||
|
parsed = html.fromstring(data["comment_html"])
|
||||||
|
parsed_str = html.tostring(parsed, encoding="unicode")
|
||||||
|
data["comment_html"] = parsed_str
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
raise serializers.ValidationError("Invalid HTML passed")
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
class IssueActivitySerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = IssueActivity
|
||||||
|
exclude = [
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class CycleIssueSerializer(BaseSerializer):
|
||||||
|
cycle = CycleSerializer(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
fields = [
|
||||||
|
"cycle",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleIssueSerializer(BaseSerializer):
|
||||||
|
module = ModuleSerializer(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
fields = [
|
||||||
|
"module",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@ -266,279 +408,18 @@ class LabelLiteSerializer(BaseSerializer):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class IssueLabelSerializer(BaseSerializer):
|
class IssueExpandSerializer(BaseSerializer):
|
||||||
|
cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
|
||||||
|
module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
|
||||||
|
labels = LabelLiteSerializer(read_only=True, many=True)
|
||||||
|
assignees = UserLiteSerializer(read_only=True, many=True)
|
||||||
|
state = StateLiteSerializer(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = IssueLabel
|
model = Issue
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueRelationSerializer(BaseSerializer):
|
|
||||||
issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueRelation
|
|
||||||
fields = [
|
|
||||||
"issue_detail",
|
|
||||||
"relation_type",
|
|
||||||
"related_issue",
|
|
||||||
"issue",
|
|
||||||
"id"
|
|
||||||
]
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
]
|
|
||||||
|
|
||||||
class RelatedIssueSerializer(BaseSerializer):
|
|
||||||
issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueRelation
|
|
||||||
fields = [
|
|
||||||
"issue_detail",
|
|
||||||
"relation_type",
|
|
||||||
"related_issue",
|
|
||||||
"issue",
|
|
||||||
"id"
|
|
||||||
]
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueAssigneeSerializer(BaseSerializer):
|
|
||||||
assignee_details = UserLiteSerializer(read_only=True, source="assignee")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueAssignee
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class CycleBaseSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = Cycle
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueCycleDetailSerializer(BaseSerializer):
|
|
||||||
cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = CycleIssue
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleBaseSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = Module
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueModuleDetailSerializer(BaseSerializer):
|
|
||||||
module_detail = ModuleBaseSerializer(read_only=True, source="module")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = ModuleIssue
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueLinkSerializer(BaseSerializer):
|
|
||||||
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueLink
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
"issue",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Validation if url already exists
|
|
||||||
def create(self, validated_data):
|
|
||||||
if IssueLink.objects.filter(
|
|
||||||
url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
|
|
||||||
).exists():
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
{"error": "URL already exists for this Issue"}
|
|
||||||
)
|
|
||||||
return IssueLink.objects.create(**validated_data)
|
|
||||||
|
|
||||||
|
|
||||||
class IssueAttachmentSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = IssueAttachment
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"issue",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueReactionSerializer(BaseSerializer):
|
|
||||||
|
|
||||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueReaction
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"issue",
|
|
||||||
"actor",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class CommentReactionLiteSerializer(BaseSerializer):
|
|
||||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = CommentReaction
|
|
||||||
fields = [
|
|
||||||
"id",
|
"id",
|
||||||
"reaction",
|
|
||||||
"comment",
|
|
||||||
"actor_detail",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class CommentReactionSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = CommentReaction
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = ["workspace", "project", "comment", "actor"]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueVoteSerializer(BaseSerializer):
|
|
||||||
|
|
||||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueVote
|
|
||||||
fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
|
|
||||||
read_only_fields = fields
|
|
||||||
|
|
||||||
|
|
||||||
class IssueCommentSerializer(BaseSerializer):
|
|
||||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
|
||||||
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
|
||||||
comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
|
|
||||||
is_member = serializers.BooleanField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueComment
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"issue",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueStateFlatSerializer(BaseSerializer):
|
|
||||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Issue
|
|
||||||
fields = [
|
|
||||||
"id",
|
|
||||||
"sequence_id",
|
|
||||||
"name",
|
|
||||||
"state_detail",
|
|
||||||
"project_detail",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
# Issue Serializer with state details
|
|
||||||
class IssueStateSerializer(BaseSerializer):
|
|
||||||
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
|
||||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
|
||||||
bridge_id = serializers.UUIDField(read_only=True)
|
|
||||||
attachment_count = serializers.IntegerField(read_only=True)
|
|
||||||
link_count = serializers.IntegerField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Issue
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class IssueSerializer(BaseSerializer):
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
state_detail = StateSerializer(read_only=True, source="state")
|
|
||||||
parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
|
|
||||||
label_details = LabelSerializer(read_only=True, source="labels", many=True)
|
|
||||||
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
|
||||||
related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
|
|
||||||
issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
|
|
||||||
issue_cycle = IssueCycleDetailSerializer(read_only=True)
|
|
||||||
issue_module = IssueModuleDetailSerializer(read_only=True)
|
|
||||||
issue_link = IssueLinkSerializer(read_only=True, many=True)
|
|
||||||
issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
|
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
|
||||||
issue_reactions = IssueReactionSerializer(read_only=True, many=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Issue
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"created_by",
|
"created_by",
|
||||||
@ -546,70 +427,3 @@ class IssueSerializer(BaseSerializer):
|
|||||||
"created_at",
|
"created_at",
|
||||||
"updated_at",
|
"updated_at",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class IssueLiteSerializer(BaseSerializer):
|
|
||||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
|
||||||
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
|
||||||
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
|
||||||
cycle_id = serializers.UUIDField(read_only=True)
|
|
||||||
module_id = serializers.UUIDField(read_only=True)
|
|
||||||
attachment_count = serializers.IntegerField(read_only=True)
|
|
||||||
link_count = serializers.IntegerField(read_only=True)
|
|
||||||
issue_reactions = IssueReactionSerializer(read_only=True, many=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Issue
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"start_date",
|
|
||||||
"target_date",
|
|
||||||
"completed_at",
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssuePublicSerializer(BaseSerializer):
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
|
||||||
reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
|
|
||||||
votes = IssueVoteSerializer(read_only=True, many=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Issue
|
|
||||||
fields = [
|
|
||||||
"id",
|
|
||||||
"name",
|
|
||||||
"description_html",
|
|
||||||
"sequence_id",
|
|
||||||
"state",
|
|
||||||
"state_detail",
|
|
||||||
"project",
|
|
||||||
"project_detail",
|
|
||||||
"workspace",
|
|
||||||
"priority",
|
|
||||||
"target_date",
|
|
||||||
"reactions",
|
|
||||||
"votes",
|
|
||||||
]
|
|
||||||
read_only_fields = fields
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class IssueSubscriberSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = IssueSubscriber
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"issue",
|
|
||||||
]
|
|
||||||
|
@ -1,36 +1,38 @@
|
|||||||
# Third Party imports
|
# Third party imports
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from .user import UserLiteSerializer
|
|
||||||
from .project import ProjectLiteSerializer
|
|
||||||
from .workspace import WorkspaceLiteSerializer
|
|
||||||
|
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
User,
|
User,
|
||||||
Module,
|
Module,
|
||||||
|
ModuleLink,
|
||||||
ModuleMember,
|
ModuleMember,
|
||||||
ModuleIssue,
|
ModuleIssue,
|
||||||
ModuleLink,
|
ProjectMember,
|
||||||
ModuleFavorite,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ModuleWriteSerializer(BaseSerializer):
|
class ModuleSerializer(BaseSerializer):
|
||||||
members = serializers.ListField(
|
members = serializers.ListField(
|
||||||
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
|
child=serializers.PrimaryKeyRelatedField(
|
||||||
|
queryset=User.objects.values_list("id", flat=True)
|
||||||
|
),
|
||||||
write_only=True,
|
write_only=True,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
|
total_issues = serializers.IntegerField(read_only=True)
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||||
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
completed_issues = serializers.IntegerField(read_only=True)
|
||||||
|
started_issues = serializers.IntegerField(read_only=True)
|
||||||
|
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||||
|
backlog_issues = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Module
|
model = Module
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"created_by",
|
"created_by",
|
||||||
@ -41,29 +43,42 @@ class ModuleWriteSerializer(BaseSerializer):
|
|||||||
|
|
||||||
def to_representation(self, instance):
|
def to_representation(self, instance):
|
||||||
data = super().to_representation(instance)
|
data = super().to_representation(instance)
|
||||||
data['members'] = [str(member.id) for member in instance.members.all()]
|
data["members"] = [str(member.id) for member in instance.members.all()]
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
|
if (
|
||||||
raise serializers.ValidationError("Start date cannot exceed target date")
|
data.get("start_date", None) is not None
|
||||||
|
and data.get("target_date", None) is not None
|
||||||
|
and data.get("start_date", None) > data.get("target_date", None)
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"Start date cannot exceed target date"
|
||||||
|
)
|
||||||
|
|
||||||
|
if data.get("members", []):
|
||||||
|
data["members"] = ProjectMember.objects.filter(
|
||||||
|
project_id=self.context.get("project_id"),
|
||||||
|
member_id__in=data["members"],
|
||||||
|
).values_list("member_id", flat=True)
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
members = validated_data.pop("members", None)
|
members = validated_data.pop("members", None)
|
||||||
|
|
||||||
project = self.context["project"]
|
project_id = self.context["project_id"]
|
||||||
|
workspace_id = self.context["workspace_id"]
|
||||||
module = Module.objects.create(**validated_data, project=project)
|
|
||||||
|
|
||||||
|
module = Module.objects.create(**validated_data, project_id=project_id)
|
||||||
if members is not None:
|
if members is not None:
|
||||||
ModuleMember.objects.bulk_create(
|
ModuleMember.objects.bulk_create(
|
||||||
[
|
[
|
||||||
ModuleMember(
|
ModuleMember(
|
||||||
module=module,
|
module=module,
|
||||||
member=member,
|
member_id=str(member),
|
||||||
project=project,
|
project_id=project_id,
|
||||||
workspace=project.workspace,
|
workspace_id=workspace_id,
|
||||||
created_by=module.created_by,
|
created_by=module.created_by,
|
||||||
updated_by=module.updated_by,
|
updated_by=module.updated_by,
|
||||||
)
|
)
|
||||||
@ -84,7 +99,7 @@ class ModuleWriteSerializer(BaseSerializer):
|
|||||||
[
|
[
|
||||||
ModuleMember(
|
ModuleMember(
|
||||||
module=instance,
|
module=instance,
|
||||||
member=member,
|
member_id=str(member),
|
||||||
project=instance.project,
|
project=instance.project,
|
||||||
workspace=instance.project.workspace,
|
workspace=instance.project.workspace,
|
||||||
created_by=instance.created_by,
|
created_by=instance.created_by,
|
||||||
@ -99,23 +114,7 @@ class ModuleWriteSerializer(BaseSerializer):
|
|||||||
return super().update(instance, validated_data)
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
|
|
||||||
class ModuleFlatSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = Module
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleIssueSerializer(BaseSerializer):
|
class ModuleIssueSerializer(BaseSerializer):
|
||||||
module_detail = ModuleFlatSerializer(read_only=True, source="module")
|
|
||||||
issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
|
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -133,8 +132,6 @@ class ModuleIssueSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class ModuleLinkSerializer(BaseSerializer):
|
class ModuleLinkSerializer(BaseSerializer):
|
||||||
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = ModuleLink
|
model = ModuleLink
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
@ -151,7 +148,8 @@ class ModuleLinkSerializer(BaseSerializer):
|
|||||||
# Validation if url already exists
|
# Validation if url already exists
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
if ModuleLink.objects.filter(
|
if ModuleLink.objects.filter(
|
||||||
url=validated_data.get("url"), module_id=validated_data.get("module_id")
|
url=validated_data.get("url"),
|
||||||
|
module_id=validated_data.get("module_id"),
|
||||||
).exists():
|
).exists():
|
||||||
raise serializers.ValidationError(
|
raise serializers.ValidationError(
|
||||||
{"error": "URL already exists for this Issue"}
|
{"error": "URL already exists for this Issue"}
|
||||||
@ -159,40 +157,7 @@ class ModuleLinkSerializer(BaseSerializer):
|
|||||||
return ModuleLink.objects.create(**validated_data)
|
return ModuleLink.objects.create(**validated_data)
|
||||||
|
|
||||||
|
|
||||||
class ModuleSerializer(BaseSerializer):
|
class ModuleLiteSerializer(BaseSerializer):
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
lead_detail = UserLiteSerializer(read_only=True, source="lead")
|
|
||||||
members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
|
|
||||||
link_module = ModuleLinkSerializer(read_only=True, many=True)
|
|
||||||
is_favorite = serializers.BooleanField(read_only=True)
|
|
||||||
total_issues = serializers.IntegerField(read_only=True)
|
|
||||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
|
||||||
completed_issues = serializers.IntegerField(read_only=True)
|
|
||||||
started_issues = serializers.IntegerField(read_only=True)
|
|
||||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
|
||||||
backlog_issues = serializers.IntegerField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Module
|
model = Module
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleFavoriteSerializer(BaseSerializer):
|
|
||||||
module_detail = ModuleFlatSerializer(source="module", read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = ModuleFavorite
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"user",
|
|
||||||
]
|
|
||||||
|
@ -1,12 +0,0 @@
|
|||||||
# Module imports
|
|
||||||
from .base import BaseSerializer
|
|
||||||
from .user import UserLiteSerializer
|
|
||||||
from plane.db.models import Notification
|
|
||||||
|
|
||||||
class NotificationSerializer(BaseSerializer):
|
|
||||||
triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Notification
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
@ -2,39 +2,78 @@
|
|||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer, DynamicBaseSerializer
|
|
||||||
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
|
|
||||||
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
|
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
Project,
|
Project,
|
||||||
ProjectMember,
|
|
||||||
ProjectMemberInvite,
|
|
||||||
ProjectIdentifier,
|
ProjectIdentifier,
|
||||||
ProjectFavorite,
|
WorkspaceMember,
|
||||||
ProjectDeployBoard,
|
|
||||||
ProjectPublicMember,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from .base import BaseSerializer
|
||||||
|
|
||||||
|
|
||||||
class ProjectSerializer(BaseSerializer):
|
class ProjectSerializer(BaseSerializer):
|
||||||
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
total_members = serializers.IntegerField(read_only=True)
|
||||||
|
total_cycles = serializers.IntegerField(read_only=True)
|
||||||
|
total_modules = serializers.IntegerField(read_only=True)
|
||||||
|
is_member = serializers.BooleanField(read_only=True)
|
||||||
|
sort_order = serializers.FloatField(read_only=True)
|
||||||
|
member_role = serializers.IntegerField(read_only=True)
|
||||||
|
is_deployed = serializers.BooleanField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Project
|
model = Project
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"emoji",
|
||||||
"workspace",
|
"workspace",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
def validate(self, data):
|
||||||
|
# Check project lead should be a member of the workspace
|
||||||
|
if (
|
||||||
|
data.get("project_lead", None) is not None
|
||||||
|
and not WorkspaceMember.objects.filter(
|
||||||
|
workspace_id=self.context["workspace_id"],
|
||||||
|
member_id=data.get("project_lead"),
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"Project lead should be a user in the workspace"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check default assignee should be a member of the workspace
|
||||||
|
if (
|
||||||
|
data.get("default_assignee", None) is not None
|
||||||
|
and not WorkspaceMember.objects.filter(
|
||||||
|
workspace_id=self.context["workspace_id"],
|
||||||
|
member_id=data.get("default_assignee"),
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"Default assignee should be a user in the workspace"
|
||||||
|
)
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
identifier = validated_data.get("identifier", "").strip().upper()
|
identifier = validated_data.get("identifier", "").strip().upper()
|
||||||
if identifier == "":
|
if identifier == "":
|
||||||
raise serializers.ValidationError(detail="Project Identifier is required")
|
raise serializers.ValidationError(
|
||||||
|
detail="Project Identifier is required"
|
||||||
|
)
|
||||||
|
|
||||||
if ProjectIdentifier.objects.filter(
|
if ProjectIdentifier.objects.filter(
|
||||||
name=identifier, workspace_id=self.context["workspace_id"]
|
name=identifier, workspace_id=self.context["workspace_id"]
|
||||||
).exists():
|
).exists():
|
||||||
raise serializers.ValidationError(detail="Project Identifier is taken")
|
raise serializers.ValidationError(
|
||||||
|
detail="Project Identifier is taken"
|
||||||
|
)
|
||||||
|
|
||||||
project = Project.objects.create(
|
project = Project.objects.create(
|
||||||
**validated_data, workspace_id=self.context["workspace_id"]
|
**validated_data, workspace_id=self.context["workspace_id"]
|
||||||
)
|
)
|
||||||
@ -45,36 +84,6 @@ class ProjectSerializer(BaseSerializer):
|
|||||||
)
|
)
|
||||||
return project
|
return project
|
||||||
|
|
||||||
def update(self, instance, validated_data):
|
|
||||||
identifier = validated_data.get("identifier", "").strip().upper()
|
|
||||||
|
|
||||||
# If identifier is not passed update the project and return
|
|
||||||
if identifier == "":
|
|
||||||
project = super().update(instance, validated_data)
|
|
||||||
return project
|
|
||||||
|
|
||||||
# If no Project Identifier is found create it
|
|
||||||
project_identifier = ProjectIdentifier.objects.filter(
|
|
||||||
name=identifier, workspace_id=instance.workspace_id
|
|
||||||
).first()
|
|
||||||
if project_identifier is None:
|
|
||||||
project = super().update(instance, validated_data)
|
|
||||||
project_identifier = ProjectIdentifier.objects.filter(
|
|
||||||
project=project
|
|
||||||
).first()
|
|
||||||
if project_identifier is not None:
|
|
||||||
project_identifier.name = identifier
|
|
||||||
project_identifier.save()
|
|
||||||
return project
|
|
||||||
# If found check if the project_id to be updated and identifier project id is same
|
|
||||||
if project_identifier.project_id == instance.id:
|
|
||||||
# If same pass update
|
|
||||||
project = super().update(instance, validated_data)
|
|
||||||
return project
|
|
||||||
|
|
||||||
# If not same fail update
|
|
||||||
raise serializers.ValidationError(detail="Project Identifier is already taken")
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectLiteSerializer(BaseSerializer):
|
class ProjectLiteSerializer(BaseSerializer):
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -89,126 +98,3 @@ class ProjectLiteSerializer(BaseSerializer):
|
|||||||
"description",
|
"description",
|
||||||
]
|
]
|
||||||
read_only_fields = fields
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
class ProjectListSerializer(DynamicBaseSerializer):
|
|
||||||
is_favorite = serializers.BooleanField(read_only=True)
|
|
||||||
total_members = serializers.IntegerField(read_only=True)
|
|
||||||
total_cycles = serializers.IntegerField(read_only=True)
|
|
||||||
total_modules = serializers.IntegerField(read_only=True)
|
|
||||||
is_member = serializers.BooleanField(read_only=True)
|
|
||||||
sort_order = serializers.FloatField(read_only=True)
|
|
||||||
member_role = serializers.IntegerField(read_only=True)
|
|
||||||
is_deployed = serializers.BooleanField(read_only=True)
|
|
||||||
members = serializers.SerializerMethodField()
|
|
||||||
|
|
||||||
def get_members(self, obj):
|
|
||||||
project_members = ProjectMember.objects.filter(project_id=obj.id).values(
|
|
||||||
"id",
|
|
||||||
"member_id",
|
|
||||||
"member__display_name",
|
|
||||||
"member__avatar",
|
|
||||||
)
|
|
||||||
return project_members
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Project
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectDetailSerializer(BaseSerializer):
|
|
||||||
# workspace = WorkSpaceSerializer(read_only=True)
|
|
||||||
default_assignee = UserLiteSerializer(read_only=True)
|
|
||||||
project_lead = UserLiteSerializer(read_only=True)
|
|
||||||
is_favorite = serializers.BooleanField(read_only=True)
|
|
||||||
total_members = serializers.IntegerField(read_only=True)
|
|
||||||
total_cycles = serializers.IntegerField(read_only=True)
|
|
||||||
total_modules = serializers.IntegerField(read_only=True)
|
|
||||||
is_member = serializers.BooleanField(read_only=True)
|
|
||||||
sort_order = serializers.FloatField(read_only=True)
|
|
||||||
member_role = serializers.IntegerField(read_only=True)
|
|
||||||
is_deployed = serializers.BooleanField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Project
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectMemberSerializer(BaseSerializer):
|
|
||||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
|
||||||
project = ProjectLiteSerializer(read_only=True)
|
|
||||||
member = UserLiteSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = ProjectMember
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectMemberAdminSerializer(BaseSerializer):
|
|
||||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
|
||||||
project = ProjectLiteSerializer(read_only=True)
|
|
||||||
member = UserAdminLiteSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = ProjectMember
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectMemberInviteSerializer(BaseSerializer):
|
|
||||||
project = ProjectLiteSerializer(read_only=True)
|
|
||||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = ProjectMemberInvite
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectIdentifierSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = ProjectIdentifier
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectFavoriteSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = ProjectFavorite
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"user",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectMemberLiteSerializer(BaseSerializer):
|
|
||||||
member = UserLiteSerializer(read_only=True)
|
|
||||||
is_subscribed = serializers.BooleanField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = ProjectMember
|
|
||||||
fields = ["member", "id", "is_subscribed"]
|
|
||||||
read_only_fields = fields
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectDeployBoardSerializer(BaseSerializer):
|
|
||||||
project_details = ProjectLiteSerializer(read_only=True, source="project")
|
|
||||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = ProjectDeployBoard
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"anchor",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectPublicMemberSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = ProjectPublicMember
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"member",
|
|
||||||
]
|
|
||||||
|
@ -1,17 +1,26 @@
|
|||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from .workspace import WorkspaceLiteSerializer
|
|
||||||
from .project import ProjectLiteSerializer
|
|
||||||
|
|
||||||
from plane.db.models import State
|
from plane.db.models import State
|
||||||
|
|
||||||
|
|
||||||
class StateSerializer(BaseSerializer):
|
class StateSerializer(BaseSerializer):
|
||||||
|
def validate(self, data):
|
||||||
|
# If the default is being provided then make all other states default False
|
||||||
|
if data.get("default", False):
|
||||||
|
State.objects.filter(
|
||||||
|
project_id=self.context.get("project_id")
|
||||||
|
).update(default=False)
|
||||||
|
return data
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = State
|
model = State
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
]
|
]
|
||||||
|
@ -1,111 +1,7 @@
|
|||||||
# Third party imports
|
# Module imports
|
||||||
from rest_framework import serializers
|
from plane.db.models import User
|
||||||
|
|
||||||
# Module import
|
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from plane.db.models import User, Workspace, WorkspaceMemberInvite
|
|
||||||
|
|
||||||
|
|
||||||
class UserSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = User
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"id",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
"is_superuser",
|
|
||||||
"is_staff",
|
|
||||||
"last_active",
|
|
||||||
"last_login_time",
|
|
||||||
"last_logout_time",
|
|
||||||
"last_login_ip",
|
|
||||||
"last_logout_ip",
|
|
||||||
"last_login_uagent",
|
|
||||||
"token_updated_at",
|
|
||||||
"is_onboarded",
|
|
||||||
"is_bot",
|
|
||||||
]
|
|
||||||
extra_kwargs = {"password": {"write_only": True}}
|
|
||||||
|
|
||||||
# If the user has already filled first name or last name then he is onboarded
|
|
||||||
def get_is_onboarded(self, obj):
|
|
||||||
return bool(obj.first_name) or bool(obj.last_name)
|
|
||||||
|
|
||||||
|
|
||||||
class UserMeSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = User
|
|
||||||
fields = [
|
|
||||||
"id",
|
|
||||||
"avatar",
|
|
||||||
"cover_image",
|
|
||||||
"date_joined",
|
|
||||||
"display_name",
|
|
||||||
"email",
|
|
||||||
"first_name",
|
|
||||||
"last_name",
|
|
||||||
"is_active",
|
|
||||||
"is_bot",
|
|
||||||
"is_email_verified",
|
|
||||||
"is_managed",
|
|
||||||
"is_onboarded",
|
|
||||||
"is_tour_completed",
|
|
||||||
"mobile_number",
|
|
||||||
"role",
|
|
||||||
"onboarding_step",
|
|
||||||
"user_timezone",
|
|
||||||
"username",
|
|
||||||
"theme",
|
|
||||||
"last_workspace_id",
|
|
||||||
]
|
|
||||||
read_only_fields = fields
|
|
||||||
|
|
||||||
|
|
||||||
class UserMeSettingsSerializer(BaseSerializer):
|
|
||||||
workspace = serializers.SerializerMethodField()
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = User
|
|
||||||
fields = [
|
|
||||||
"id",
|
|
||||||
"email",
|
|
||||||
"workspace",
|
|
||||||
]
|
|
||||||
read_only_fields = fields
|
|
||||||
|
|
||||||
def get_workspace(self, obj):
|
|
||||||
workspace_invites = WorkspaceMemberInvite.objects.filter(
|
|
||||||
email=obj.email
|
|
||||||
).count()
|
|
||||||
if obj.last_workspace_id is not None:
|
|
||||||
workspace = Workspace.objects.filter(
|
|
||||||
pk=obj.last_workspace_id, workspace_member__member=obj.id
|
|
||||||
).first()
|
|
||||||
return {
|
|
||||||
"last_workspace_id": obj.last_workspace_id,
|
|
||||||
"last_workspace_slug": workspace.slug if workspace is not None else "",
|
|
||||||
"fallback_workspace_id": obj.last_workspace_id,
|
|
||||||
"fallback_workspace_slug": workspace.slug if workspace is not None else "",
|
|
||||||
"invites": workspace_invites,
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
fallback_workspace = (
|
|
||||||
Workspace.objects.filter(workspace_member__member_id=obj.id)
|
|
||||||
.order_by("created_at")
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
"last_workspace_id": None,
|
|
||||||
"last_workspace_slug": None,
|
|
||||||
"fallback_workspace_id": fallback_workspace.id
|
|
||||||
if fallback_workspace is not None
|
|
||||||
else None,
|
|
||||||
"fallback_workspace_slug": fallback_workspace.slug
|
|
||||||
if fallback_workspace is not None
|
|
||||||
else None,
|
|
||||||
"invites": workspace_invites,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class UserLiteSerializer(BaseSerializer):
|
class UserLiteSerializer(BaseSerializer):
|
||||||
@ -115,49 +11,9 @@ class UserLiteSerializer(BaseSerializer):
|
|||||||
"id",
|
"id",
|
||||||
"first_name",
|
"first_name",
|
||||||
"last_name",
|
"last_name",
|
||||||
|
"email",
|
||||||
"avatar",
|
"avatar",
|
||||||
"is_bot",
|
|
||||||
"display_name",
|
|
||||||
]
|
|
||||||
read_only_fields = [
|
|
||||||
"id",
|
|
||||||
"is_bot",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class UserAdminLiteSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = User
|
|
||||||
fields = [
|
|
||||||
"id",
|
|
||||||
"first_name",
|
|
||||||
"last_name",
|
|
||||||
"avatar",
|
|
||||||
"is_bot",
|
|
||||||
"display_name",
|
"display_name",
|
||||||
"email",
|
"email",
|
||||||
]
|
]
|
||||||
read_only_fields = [
|
read_only_fields = fields
|
||||||
"id",
|
|
||||||
"is_bot",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class ChangePasswordSerializer(serializers.Serializer):
|
|
||||||
model = User
|
|
||||||
|
|
||||||
"""
|
|
||||||
Serializer for password change endpoint.
|
|
||||||
"""
|
|
||||||
old_password = serializers.CharField(required=True)
|
|
||||||
new_password = serializers.CharField(required=True)
|
|
||||||
|
|
||||||
|
|
||||||
class ResetPasswordSerializer(serializers.Serializer):
|
|
||||||
model = User
|
|
||||||
|
|
||||||
"""
|
|
||||||
Serializer for password change endpoint.
|
|
||||||
"""
|
|
||||||
new_password = serializers.CharField(required=True)
|
|
||||||
confirm_password = serializers.CharField(required=True)
|
|
||||||
|
@ -1,39 +1,11 @@
|
|||||||
# Third party imports
|
|
||||||
from rest_framework import serializers
|
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
|
from plane.db.models import Workspace
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from .user import UserLiteSerializer, UserAdminLiteSerializer
|
|
||||||
|
|
||||||
from plane.db.models import (
|
|
||||||
User,
|
|
||||||
Workspace,
|
|
||||||
WorkspaceMember,
|
|
||||||
Team,
|
|
||||||
TeamMember,
|
|
||||||
WorkspaceMemberInvite,
|
|
||||||
WorkspaceTheme,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class WorkSpaceSerializer(BaseSerializer):
|
|
||||||
owner = UserLiteSerializer(read_only=True)
|
|
||||||
total_members = serializers.IntegerField(read_only=True)
|
|
||||||
total_issues = serializers.IntegerField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Workspace
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"id",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
"owner",
|
|
||||||
]
|
|
||||||
|
|
||||||
class WorkspaceLiteSerializer(BaseSerializer):
|
class WorkspaceLiteSerializer(BaseSerializer):
|
||||||
|
"""Lite serializer with only required fields"""
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Workspace
|
model = Workspace
|
||||||
fields = [
|
fields = [
|
||||||
@ -42,95 +14,3 @@ class WorkspaceLiteSerializer(BaseSerializer):
|
|||||||
"id",
|
"id",
|
||||||
]
|
]
|
||||||
read_only_fields = fields
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class WorkSpaceMemberSerializer(BaseSerializer):
|
|
||||||
member = UserLiteSerializer(read_only=True)
|
|
||||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = WorkspaceMember
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceMemberMeSerializer(BaseSerializer):
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = WorkspaceMember
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceMemberAdminSerializer(BaseSerializer):
|
|
||||||
member = UserAdminLiteSerializer(read_only=True)
|
|
||||||
workspace = WorkspaceLiteSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = WorkspaceMember
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkSpaceMemberInviteSerializer(BaseSerializer):
|
|
||||||
workspace = WorkSpaceSerializer(read_only=True)
|
|
||||||
total_members = serializers.IntegerField(read_only=True)
|
|
||||||
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = WorkspaceMemberInvite
|
|
||||||
fields = "__all__"
|
|
||||||
|
|
||||||
|
|
||||||
class TeamSerializer(BaseSerializer):
|
|
||||||
members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
|
|
||||||
members = serializers.ListField(
|
|
||||||
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
|
|
||||||
write_only=True,
|
|
||||||
required=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Team
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
def create(self, validated_data, **kwargs):
|
|
||||||
if "members" in validated_data:
|
|
||||||
members = validated_data.pop("members")
|
|
||||||
workspace = self.context["workspace"]
|
|
||||||
team = Team.objects.create(**validated_data, workspace=workspace)
|
|
||||||
team_members = [
|
|
||||||
TeamMember(member=member, team=team, workspace=workspace)
|
|
||||||
for member in members
|
|
||||||
]
|
|
||||||
TeamMember.objects.bulk_create(team_members, batch_size=10)
|
|
||||||
return team
|
|
||||||
team = Team.objects.create(**validated_data)
|
|
||||||
return team
|
|
||||||
|
|
||||||
def update(self, instance, validated_data):
|
|
||||||
if "members" in validated_data:
|
|
||||||
members = validated_data.pop("members")
|
|
||||||
TeamMember.objects.filter(team=instance).delete()
|
|
||||||
team_members = [
|
|
||||||
TeamMember(member=member, team=instance, workspace=instance.workspace)
|
|
||||||
for member in members
|
|
||||||
]
|
|
||||||
TeamMember.objects.bulk_create(team_members, batch_size=10)
|
|
||||||
return super().update(instance, validated_data)
|
|
||||||
return super().update(instance, validated_data)
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceThemeSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = WorkspaceTheme
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"actor",
|
|
||||||
]
|
|
||||||
|
@ -1,50 +1,15 @@
|
|||||||
from .analytic import urlpatterns as analytic_urls
|
from .project import urlpatterns as project_patterns
|
||||||
from .asset import urlpatterns as asset_urls
|
from .state import urlpatterns as state_patterns
|
||||||
from .authentication import urlpatterns as authentication_urls
|
from .issue import urlpatterns as issue_patterns
|
||||||
from .config import urlpatterns as configuration_urls
|
from .cycle import urlpatterns as cycle_patterns
|
||||||
from .cycle import urlpatterns as cycle_urls
|
from .module import urlpatterns as module_patterns
|
||||||
from .estimate import urlpatterns as estimate_urls
|
from .inbox import urlpatterns as inbox_patterns
|
||||||
from .gpt import urlpatterns as gpt_urls
|
|
||||||
from .importer import urlpatterns as importer_urls
|
|
||||||
from .inbox import urlpatterns as inbox_urls
|
|
||||||
from .integration import urlpatterns as integration_urls
|
|
||||||
from .issue import urlpatterns as issue_urls
|
|
||||||
from .module import urlpatterns as module_urls
|
|
||||||
from .notification import urlpatterns as notification_urls
|
|
||||||
from .page import urlpatterns as page_urls
|
|
||||||
from .project import urlpatterns as project_urls
|
|
||||||
from .public_board import urlpatterns as public_board_urls
|
|
||||||
from .release_note import urlpatterns as release_note_urls
|
|
||||||
from .search import urlpatterns as search_urls
|
|
||||||
from .state import urlpatterns as state_urls
|
|
||||||
from .unsplash import urlpatterns as unsplash_urls
|
|
||||||
from .user import urlpatterns as user_urls
|
|
||||||
from .views import urlpatterns as view_urls
|
|
||||||
from .workspace import urlpatterns as workspace_urls
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
*analytic_urls,
|
*project_patterns,
|
||||||
*asset_urls,
|
*state_patterns,
|
||||||
*authentication_urls,
|
*issue_patterns,
|
||||||
*configuration_urls,
|
*cycle_patterns,
|
||||||
*cycle_urls,
|
*module_patterns,
|
||||||
*estimate_urls,
|
*inbox_patterns,
|
||||||
*gpt_urls,
|
|
||||||
*importer_urls,
|
|
||||||
*inbox_urls,
|
|
||||||
*integration_urls,
|
|
||||||
*issue_urls,
|
|
||||||
*module_urls,
|
|
||||||
*notification_urls,
|
|
||||||
*page_urls,
|
|
||||||
*project_urls,
|
|
||||||
*public_board_urls,
|
|
||||||
*release_note_urls,
|
|
||||||
*search_urls,
|
|
||||||
*state_urls,
|
|
||||||
*unsplash_urls,
|
|
||||||
*user_urls,
|
|
||||||
*view_urls,
|
|
||||||
*workspace_urls,
|
|
||||||
]
|
]
|
||||||
|
@ -1,12 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import ConfigurationEndpoint
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path(
|
|
||||||
"configs/",
|
|
||||||
ConfigurationEndpoint.as_view(),
|
|
||||||
name="configuration",
|
|
||||||
),
|
|
||||||
]
|
|
@ -1,87 +1,46 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
from plane.api.views.cycle import (
|
||||||
from plane.api.views import (
|
CycleAPIEndpoint,
|
||||||
CycleViewSet,
|
CycleIssueAPIEndpoint,
|
||||||
CycleIssueViewSet,
|
TransferCycleIssueAPIEndpoint,
|
||||||
CycleDateCheckEndpoint,
|
CycleArchiveUnarchiveAPIEndpoint,
|
||||||
CycleFavoriteViewSet,
|
|
||||||
TransferCycleIssueEndpoint,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
|
||||||
CycleViewSet.as_view(
|
CycleAPIEndpoint.as_view(),
|
||||||
{
|
name="cycles",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-cycle",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
|
||||||
CycleViewSet.as_view(
|
CycleAPIEndpoint.as_view(),
|
||||||
{
|
name="cycles",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-cycle",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
|
||||||
CycleIssueViewSet.as_view(
|
CycleIssueAPIEndpoint.as_view(),
|
||||||
{
|
name="cycle-issues",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-cycle",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
|
||||||
CycleIssueViewSet.as_view(
|
CycleIssueAPIEndpoint.as_view(),
|
||||||
{
|
name="cycle-issues",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-cycle",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
|
|
||||||
CycleDateCheckEndpoint.as_view(),
|
|
||||||
name="project-cycle-date",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
|
|
||||||
CycleFavoriteViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="user-favorite-cycle",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
|
|
||||||
CycleFavoriteViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="user-favorite-cycle",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
|
||||||
TransferCycleIssueEndpoint.as_view(),
|
TransferCycleIssueAPIEndpoint.as_view(),
|
||||||
name="transfer-issues",
|
name="transfer-issues",
|
||||||
),
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/archive/",
|
||||||
|
CycleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||||
|
name="cycle-archive-unarchive",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
|
||||||
|
CycleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||||
|
name="cycle-archive-unarchive",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
@ -1,37 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
|
||||||
ServiceIssueImportSummaryEndpoint,
|
|
||||||
ImportServiceEndpoint,
|
|
||||||
UpdateServiceImportStatusEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/importers/<str:service>/",
|
|
||||||
ServiceIssueImportSummaryEndpoint.as_view(),
|
|
||||||
name="importer-summary",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/importers/<str:service>/",
|
|
||||||
ImportServiceEndpoint.as_view(),
|
|
||||||
name="importer",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/importers/",
|
|
||||||
ImportServiceEndpoint.as_view(),
|
|
||||||
name="importer",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
|
|
||||||
ImportServiceEndpoint.as_view(),
|
|
||||||
name="importer",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
|
|
||||||
UpdateServiceImportStatusEndpoint.as_view(),
|
|
||||||
name="importer-status",
|
|
||||||
),
|
|
||||||
]
|
|
@ -1,53 +1,17 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
from plane.api.views import InboxIssueAPIEndpoint
|
||||||
from plane.api.views import (
|
|
||||||
InboxViewSet,
|
|
||||||
InboxIssueViewSet,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
|
||||||
InboxViewSet.as_view(
|
InboxIssueAPIEndpoint.as_view(),
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="inbox",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
|
|
||||||
InboxViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="inbox",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
|
|
||||||
InboxIssueViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="inbox-issue",
|
name="inbox-issue",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
|
||||||
InboxIssueViewSet.as_view(
|
InboxIssueAPIEndpoint.as_view(),
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="inbox-issue",
|
name="inbox-issue",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
@ -1,150 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
|
||||||
IntegrationViewSet,
|
|
||||||
WorkspaceIntegrationViewSet,
|
|
||||||
GithubRepositoriesEndpoint,
|
|
||||||
GithubRepositorySyncViewSet,
|
|
||||||
GithubIssueSyncViewSet,
|
|
||||||
GithubCommentSyncViewSet,
|
|
||||||
BulkCreateGithubIssueSyncEndpoint,
|
|
||||||
SlackProjectSyncViewSet,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path(
|
|
||||||
"integrations/",
|
|
||||||
IntegrationViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="integrations",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"integrations/<uuid:pk>/",
|
|
||||||
IntegrationViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="integrations",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/workspace-integrations/",
|
|
||||||
WorkspaceIntegrationViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="workspace-integrations",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/workspace-integrations/<str:provider>/",
|
|
||||||
WorkspaceIntegrationViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="workspace-integrations",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
|
|
||||||
WorkspaceIntegrationViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="workspace-integrations",
|
|
||||||
),
|
|
||||||
# Github Integrations
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
|
|
||||||
GithubRepositoriesEndpoint.as_view(),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
|
|
||||||
GithubRepositorySyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
|
|
||||||
GithubRepositorySyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
|
|
||||||
GithubIssueSyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "create",
|
|
||||||
"get": "list",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
|
|
||||||
BulkCreateGithubIssueSyncEndpoint.as_view(),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
|
|
||||||
GithubIssueSyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
|
|
||||||
GithubCommentSyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "create",
|
|
||||||
"get": "list",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
|
|
||||||
GithubCommentSyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
## End Github Integrations
|
|
||||||
# Slack Integration
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
|
|
||||||
SlackProjectSyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "create",
|
|
||||||
"get": "list",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
|
|
||||||
SlackProjectSyncViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
"get": "retrieve",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
),
|
|
||||||
## End Slack Integration
|
|
||||||
]
|
|
@ -1,315 +1,62 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
from plane.api.views import (
|
||||||
IssueViewSet,
|
IssueAPIEndpoint,
|
||||||
LabelViewSet,
|
LabelAPIEndpoint,
|
||||||
BulkCreateIssueLabelsEndpoint,
|
IssueLinkAPIEndpoint,
|
||||||
BulkDeleteIssuesEndpoint,
|
IssueCommentAPIEndpoint,
|
||||||
BulkImportIssuesEndpoint,
|
IssueActivityAPIEndpoint,
|
||||||
UserWorkSpaceIssues,
|
|
||||||
SubIssuesEndpoint,
|
|
||||||
IssueLinkViewSet,
|
|
||||||
IssueAttachmentEndpoint,
|
|
||||||
ExportIssuesEndpoint,
|
|
||||||
IssueActivityEndpoint,
|
|
||||||
IssueCommentViewSet,
|
|
||||||
IssueSubscriberViewSet,
|
|
||||||
IssueReactionViewSet,
|
|
||||||
CommentReactionViewSet,
|
|
||||||
IssueUserDisplayPropertyEndpoint,
|
|
||||||
IssueArchiveViewSet,
|
|
||||||
IssueRelationViewSet,
|
|
||||||
IssueDraftViewSet,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
|
||||||
IssueViewSet.as_view(
|
IssueAPIEndpoint.as_view(),
|
||||||
{
|
name="issue",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
|
||||||
IssueViewSet.as_view(
|
IssueAPIEndpoint.as_view(),
|
||||||
{
|
name="issue",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/labels/",
|
||||||
LabelViewSet.as_view(
|
LabelAPIEndpoint.as_view(),
|
||||||
{
|
name="label",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-labels",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/labels/<uuid:pk>/",
|
||||||
LabelViewSet.as_view(
|
LabelAPIEndpoint.as_view(),
|
||||||
{
|
name="label",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-labels",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/",
|
||||||
BulkCreateIssueLabelsEndpoint.as_view(),
|
IssueLinkAPIEndpoint.as_view(),
|
||||||
name="project-bulk-labels",
|
name="link",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/<uuid:pk>/",
|
||||||
BulkDeleteIssuesEndpoint.as_view(),
|
IssueLinkAPIEndpoint.as_view(),
|
||||||
name="project-issues-bulk",
|
name="link",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
|
|
||||||
BulkImportIssuesEndpoint.as_view(),
|
|
||||||
name="project-issues-bulk",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/my-issues/",
|
|
||||||
UserWorkSpaceIssues.as_view(),
|
|
||||||
name="workspace-issues",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
|
|
||||||
SubIssuesEndpoint.as_view(),
|
|
||||||
name="sub-issues",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
|
|
||||||
IssueLinkViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-links",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
|
|
||||||
IssueLinkViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-links",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
|
|
||||||
IssueAttachmentEndpoint.as_view(),
|
|
||||||
name="project-issue-attachments",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
|
|
||||||
IssueAttachmentEndpoint.as_view(),
|
|
||||||
name="project-issue-attachments",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/export-issues/",
|
|
||||||
ExportIssuesEndpoint.as_view(),
|
|
||||||
name="export-issues",
|
|
||||||
),
|
|
||||||
## End Issues
|
|
||||||
## Issue Activity
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
|
|
||||||
IssueActivityEndpoint.as_view(),
|
|
||||||
name="project-issue-history",
|
|
||||||
),
|
|
||||||
## Issue Activity
|
|
||||||
## IssueComments
|
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
||||||
IssueCommentViewSet.as_view(
|
IssueCommentAPIEndpoint.as_view(),
|
||||||
{
|
name="comment",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-comment",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
||||||
IssueCommentViewSet.as_view(
|
IssueCommentAPIEndpoint.as_view(),
|
||||||
{
|
name="comment",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-comment",
|
|
||||||
),
|
|
||||||
## End IssueComments
|
|
||||||
# Issue Subscribers
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
|
|
||||||
IssueSubscriberViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-subscribers",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/",
|
||||||
IssueSubscriberViewSet.as_view({"delete": "destroy"}),
|
IssueActivityAPIEndpoint.as_view(),
|
||||||
name="project-issue-subscribers",
|
name="activity",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/<uuid:pk>/",
|
||||||
IssueSubscriberViewSet.as_view(
|
IssueActivityAPIEndpoint.as_view(),
|
||||||
{
|
name="activity",
|
||||||
"get": "subscription_status",
|
|
||||||
"post": "subscribe",
|
|
||||||
"delete": "unsubscribe",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-subscribers",
|
|
||||||
),
|
|
||||||
## End Issue Subscribers
|
|
||||||
# Issue Reactions
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
|
||||||
IssueReactionViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-reactions",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
|
||||||
IssueReactionViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-reactions",
|
|
||||||
),
|
|
||||||
## End Issue Reactions
|
|
||||||
# Comment Reactions
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
|
||||||
CommentReactionViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-comment-reactions",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
|
||||||
CommentReactionViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-comment-reactions",
|
|
||||||
),
|
|
||||||
## End Comment Reactions
|
|
||||||
## IssueProperty
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
|
|
||||||
IssueUserDisplayPropertyEndpoint.as_view(),
|
|
||||||
name="project-issue-display-properties",
|
|
||||||
),
|
|
||||||
## IssueProperty End
|
|
||||||
## Issue Archives
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
|
|
||||||
IssueArchiveViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-archive",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
|
|
||||||
IssueArchiveViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-archive",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
|
|
||||||
IssueArchiveViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "unarchive",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-archive",
|
|
||||||
),
|
|
||||||
## End Issue Archives
|
|
||||||
## Issue Relation
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
|
|
||||||
IssueRelationViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="issue-relation",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
|
|
||||||
IssueRelationViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="issue-relation",
|
|
||||||
),
|
|
||||||
## End Issue Relation
|
|
||||||
## Issue Drafts
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
|
|
||||||
IssueDraftViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-draft",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
|
|
||||||
IssueDraftViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-draft",
|
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
@ -1,104 +1,40 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
from plane.api.views import (
|
||||||
ModuleViewSet,
|
ModuleAPIEndpoint,
|
||||||
ModuleIssueViewSet,
|
ModuleIssueAPIEndpoint,
|
||||||
ModuleLinkViewSet,
|
ModuleArchiveUnarchiveAPIEndpoint,
|
||||||
ModuleFavoriteViewSet,
|
|
||||||
BulkImportModulesEndpoint,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
|
||||||
ModuleViewSet.as_view(
|
ModuleAPIEndpoint.as_view(),
|
||||||
{
|
name="modules",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-modules",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
|
||||||
ModuleViewSet.as_view(
|
ModuleAPIEndpoint.as_view(),
|
||||||
{
|
name="modules",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-modules",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
|
||||||
ModuleIssueViewSet.as_view(
|
ModuleIssueAPIEndpoint.as_view(),
|
||||||
{
|
name="module-issues",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-module-issues",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:issue_id>/",
|
||||||
ModuleIssueViewSet.as_view(
|
ModuleIssueAPIEndpoint.as_view(),
|
||||||
{
|
name="module-issues",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-module-issues",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/archive/",
|
||||||
ModuleLinkViewSet.as_view(
|
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||||
{
|
name="module-archive-unarchive",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-module-links",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
|
||||||
ModuleLinkViewSet.as_view(
|
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
|
||||||
{
|
name="module-archive-unarchive",
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-issue-module-links",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
|
|
||||||
ModuleFavoriteViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="user-favorite-module",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
|
|
||||||
ModuleFavoriteViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="user-favorite-module",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
|
|
||||||
BulkImportModulesEndpoint.as_view(),
|
|
||||||
name="bulk-modules-create",
|
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
@ -1,79 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
|
||||||
PageViewSet,
|
|
||||||
PageBlockViewSet,
|
|
||||||
PageFavoriteViewSet,
|
|
||||||
CreateIssueFromPageBlockEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
|
|
||||||
PageViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-pages",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
|
|
||||||
PageViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-pages",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/",
|
|
||||||
PageBlockViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-page-blocks",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:pk>/",
|
|
||||||
PageBlockViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-page-blocks",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
|
|
||||||
PageFavoriteViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="user-favorite-pages",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
|
|
||||||
PageFavoriteViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="user-favorite-pages",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:page_block_id>/issues/",
|
|
||||||
CreateIssueFromPageBlockEndpoint.as_view(),
|
|
||||||
name="page-block-issues",
|
|
||||||
),
|
|
||||||
]
|
|
@ -1,132 +1,24 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from plane.api.views import (
|
from plane.api.views import (
|
||||||
ProjectViewSet,
|
ProjectAPIEndpoint,
|
||||||
InviteProjectEndpoint,
|
ProjectArchiveUnarchiveAPIEndpoint,
|
||||||
ProjectMemberViewSet,
|
|
||||||
ProjectMemberInvitationsViewset,
|
|
||||||
ProjectMemberUserEndpoint,
|
|
||||||
ProjectJoinEndpoint,
|
|
||||||
AddTeamToProjectEndpoint,
|
|
||||||
ProjectUserViewsEndpoint,
|
|
||||||
ProjectIdentifierEndpoint,
|
|
||||||
ProjectFavoritesViewSet,
|
|
||||||
LeaveProjectEndpoint,
|
|
||||||
ProjectPublicCoverImagesEndpoint,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/",
|
"workspaces/<str:slug>/projects/",
|
||||||
ProjectViewSet.as_view(
|
ProjectAPIEndpoint.as_view(),
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project",
|
name="project",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:pk>/",
|
||||||
ProjectViewSet.as_view(
|
ProjectAPIEndpoint.as_view(),
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"put": "update",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project",
|
name="project",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/project-identifiers/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
|
||||||
ProjectIdentifierEndpoint.as_view(),
|
ProjectArchiveUnarchiveAPIEndpoint.as_view(),
|
||||||
name="project-identifiers",
|
name="project-archive-unarchive",
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/invite/",
|
|
||||||
InviteProjectEndpoint.as_view(),
|
|
||||||
name="invite-project",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/members/",
|
|
||||||
ProjectMemberViewSet.as_view({"get": "list", "post": "create"}),
|
|
||||||
name="project-member",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
|
|
||||||
ProjectMemberViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-member",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/join/",
|
|
||||||
ProjectJoinEndpoint.as_view(),
|
|
||||||
name="project-join",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/team-invite/",
|
|
||||||
AddTeamToProjectEndpoint.as_view(),
|
|
||||||
name="projects",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/",
|
|
||||||
ProjectMemberInvitationsViewset.as_view({"get": "list"}),
|
|
||||||
name="project-member-invite",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/<uuid:pk>/",
|
|
||||||
ProjectMemberInvitationsViewset.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-member-invite",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-views/",
|
|
||||||
ProjectUserViewsEndpoint.as_view(),
|
|
||||||
name="project-view",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/me/",
|
|
||||||
ProjectMemberUserEndpoint.as_view(),
|
|
||||||
name="project-member-view",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/user-favorite-projects/",
|
|
||||||
ProjectFavoritesViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-favorite",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/user-favorite-projects/<uuid:project_id>/",
|
|
||||||
ProjectFavoritesViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-favorite",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
|
|
||||||
LeaveProjectEndpoint.as_view(),
|
|
||||||
name="leave-project",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"project-covers/",
|
|
||||||
ProjectPublicCoverImagesEndpoint.as_view(),
|
|
||||||
name="project-covers",
|
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
@ -1,151 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
|
||||||
ProjectDeployBoardViewSet,
|
|
||||||
ProjectDeployBoardPublicSettingsEndpoint,
|
|
||||||
ProjectIssuesPublicEndpoint,
|
|
||||||
IssueRetrievePublicEndpoint,
|
|
||||||
IssueCommentPublicViewSet,
|
|
||||||
IssueReactionPublicViewSet,
|
|
||||||
CommentReactionPublicViewSet,
|
|
||||||
InboxIssuePublicViewSet,
|
|
||||||
IssueVotePublicViewSet,
|
|
||||||
WorkspaceProjectDeployBoardEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
|
|
||||||
ProjectDeployBoardViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-deploy-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
|
|
||||||
ProjectDeployBoardViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-deploy-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
|
|
||||||
ProjectDeployBoardPublicSettingsEndpoint.as_view(),
|
|
||||||
name="project-deploy-board-settings",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
|
|
||||||
ProjectIssuesPublicEndpoint.as_view(),
|
|
||||||
name="project-deploy-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
|
|
||||||
IssueRetrievePublicEndpoint.as_view(),
|
|
||||||
name="workspace-project-boards",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
|
||||||
IssueCommentPublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="issue-comments-project-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
|
||||||
IssueCommentPublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="issue-comments-project-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
|
||||||
IssueReactionPublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="issue-reactions-project-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
|
||||||
IssueReactionPublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="issue-reactions-project-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
|
||||||
CommentReactionPublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="comment-reactions-project-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
|
||||||
CommentReactionPublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="comment-reactions-project-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
|
|
||||||
InboxIssuePublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="inbox-issue",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
|
|
||||||
InboxIssuePublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="inbox-issue",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
|
|
||||||
IssueVotePublicViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="issue-vote-project-board",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"public/workspaces/<str:slug>/project-boards/",
|
|
||||||
WorkspaceProjectDeployBoardEndpoint.as_view(),
|
|
||||||
name="workspace-project-boards",
|
|
||||||
),
|
|
||||||
]
|
|
@ -1,13 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import ReleaseNotesEndpoint
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path(
|
|
||||||
"release-notes/",
|
|
||||||
ReleaseNotesEndpoint.as_view(),
|
|
||||||
name="release-notes",
|
|
||||||
),
|
|
||||||
]
|
|
@ -1,38 +1,16 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
from plane.api.views import StateAPIEndpoint
|
||||||
from plane.api.views import StateViewSet
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
|
||||||
StateViewSet.as_view(
|
StateAPIEndpoint.as_view(),
|
||||||
{
|
name="states",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-states",
|
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:state_id>/",
|
||||||
StateViewSet.as_view(
|
StateAPIEndpoint.as_view(),
|
||||||
{
|
name="states",
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "destroy",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-state",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/mark-default/",
|
|
||||||
StateViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "mark_as_default",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="project-state",
|
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
@ -1,13 +0,0 @@
|
|||||||
from django.urls import path
|
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import UnsplashEndpoint
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
|
||||||
path(
|
|
||||||
"unsplash/",
|
|
||||||
UnsplashEndpoint.as_view(),
|
|
||||||
name="unsplash",
|
|
||||||
),
|
|
||||||
]
|
|
File diff suppressed because it is too large
Load Diff
@ -1,169 +1,26 @@
|
|||||||
from .project import (
|
from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
|
||||||
ProjectViewSet,
|
|
||||||
ProjectMemberViewSet,
|
|
||||||
UserProjectInvitationsViewset,
|
|
||||||
InviteProjectEndpoint,
|
|
||||||
AddTeamToProjectEndpoint,
|
|
||||||
ProjectMemberInvitationsViewset,
|
|
||||||
ProjectMemberInviteDetailViewSet,
|
|
||||||
ProjectIdentifierEndpoint,
|
|
||||||
ProjectJoinEndpoint,
|
|
||||||
ProjectUserViewsEndpoint,
|
|
||||||
ProjectMemberUserEndpoint,
|
|
||||||
ProjectFavoritesViewSet,
|
|
||||||
ProjectDeployBoardViewSet,
|
|
||||||
ProjectDeployBoardPublicSettingsEndpoint,
|
|
||||||
WorkspaceProjectDeployBoardEndpoint,
|
|
||||||
LeaveProjectEndpoint,
|
|
||||||
ProjectPublicCoverImagesEndpoint,
|
|
||||||
)
|
|
||||||
from .user import (
|
|
||||||
UserEndpoint,
|
|
||||||
UpdateUserOnBoardedEndpoint,
|
|
||||||
UpdateUserTourCompletedEndpoint,
|
|
||||||
UserActivityEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .oauth import OauthEndpoint
|
from .state import StateAPIEndpoint
|
||||||
|
|
||||||
from .base import BaseAPIView, BaseViewSet
|
|
||||||
|
|
||||||
from .workspace import (
|
|
||||||
WorkSpaceViewSet,
|
|
||||||
UserWorkSpacesEndpoint,
|
|
||||||
WorkSpaceAvailabilityCheckEndpoint,
|
|
||||||
InviteWorkspaceEndpoint,
|
|
||||||
JoinWorkspaceEndpoint,
|
|
||||||
WorkSpaceMemberViewSet,
|
|
||||||
TeamMemberViewSet,
|
|
||||||
WorkspaceInvitationsViewset,
|
|
||||||
UserWorkspaceInvitationsEndpoint,
|
|
||||||
UserWorkspaceInvitationEndpoint,
|
|
||||||
UserLastProjectWithWorkspaceEndpoint,
|
|
||||||
WorkspaceMemberUserEndpoint,
|
|
||||||
WorkspaceMemberUserViewsEndpoint,
|
|
||||||
UserActivityGraphEndpoint,
|
|
||||||
UserIssueCompletedGraphEndpoint,
|
|
||||||
UserWorkspaceDashboardEndpoint,
|
|
||||||
WorkspaceThemeViewSet,
|
|
||||||
WorkspaceUserProfileStatsEndpoint,
|
|
||||||
WorkspaceUserActivityEndpoint,
|
|
||||||
WorkspaceUserProfileEndpoint,
|
|
||||||
WorkspaceUserProfileIssuesEndpoint,
|
|
||||||
WorkspaceLabelsEndpoint,
|
|
||||||
LeaveWorkspaceEndpoint,
|
|
||||||
)
|
|
||||||
from .state import StateViewSet
|
|
||||||
from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, IssueViewFavoriteViewSet
|
|
||||||
from .cycle import (
|
|
||||||
CycleViewSet,
|
|
||||||
CycleIssueViewSet,
|
|
||||||
CycleDateCheckEndpoint,
|
|
||||||
CycleFavoriteViewSet,
|
|
||||||
TransferCycleIssueEndpoint,
|
|
||||||
)
|
|
||||||
from .asset import FileAssetEndpoint, UserAssetsEndpoint
|
|
||||||
from .issue import (
|
from .issue import (
|
||||||
IssueViewSet,
|
IssueAPIEndpoint,
|
||||||
WorkSpaceIssuesEndpoint,
|
LabelAPIEndpoint,
|
||||||
IssueActivityEndpoint,
|
IssueLinkAPIEndpoint,
|
||||||
IssueCommentViewSet,
|
IssueCommentAPIEndpoint,
|
||||||
IssueUserDisplayPropertyEndpoint,
|
IssueActivityAPIEndpoint,
|
||||||
LabelViewSet,
|
|
||||||
BulkDeleteIssuesEndpoint,
|
|
||||||
UserWorkSpaceIssues,
|
|
||||||
SubIssuesEndpoint,
|
|
||||||
IssueLinkViewSet,
|
|
||||||
BulkCreateIssueLabelsEndpoint,
|
|
||||||
IssueAttachmentEndpoint,
|
|
||||||
IssueArchiveViewSet,
|
|
||||||
IssueSubscriberViewSet,
|
|
||||||
IssueCommentPublicViewSet,
|
|
||||||
CommentReactionViewSet,
|
|
||||||
IssueReactionViewSet,
|
|
||||||
IssueReactionPublicViewSet,
|
|
||||||
CommentReactionPublicViewSet,
|
|
||||||
IssueVotePublicViewSet,
|
|
||||||
IssueRelationViewSet,
|
|
||||||
IssueRetrievePublicEndpoint,
|
|
||||||
ProjectIssuesPublicEndpoint,
|
|
||||||
IssueDraftViewSet,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
from .auth_extended import (
|
from .cycle import (
|
||||||
VerifyEmailEndpoint,
|
CycleAPIEndpoint,
|
||||||
RequestEmailVerificationEndpoint,
|
CycleIssueAPIEndpoint,
|
||||||
ForgotPasswordEndpoint,
|
TransferCycleIssueAPIEndpoint,
|
||||||
ResetPasswordEndpoint,
|
CycleArchiveUnarchiveAPIEndpoint,
|
||||||
ChangePasswordEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
from .authentication import (
|
|
||||||
SignUpEndpoint,
|
|
||||||
SignInEndpoint,
|
|
||||||
SignOutEndpoint,
|
|
||||||
MagicSignInEndpoint,
|
|
||||||
MagicSignInGenerateEndpoint,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
from .module import (
|
from .module import (
|
||||||
ModuleViewSet,
|
ModuleAPIEndpoint,
|
||||||
ModuleIssueViewSet,
|
ModuleIssueAPIEndpoint,
|
||||||
ModuleLinkViewSet,
|
ModuleArchiveUnarchiveAPIEndpoint,
|
||||||
ModuleFavoriteViewSet,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
from .api_token import ApiTokenEndpoint
|
from .inbox import InboxIssueAPIEndpoint
|
||||||
|
|
||||||
from .integration import (
|
|
||||||
WorkspaceIntegrationViewSet,
|
|
||||||
IntegrationViewSet,
|
|
||||||
GithubIssueSyncViewSet,
|
|
||||||
GithubRepositorySyncViewSet,
|
|
||||||
GithubCommentSyncViewSet,
|
|
||||||
GithubRepositoriesEndpoint,
|
|
||||||
BulkCreateGithubIssueSyncEndpoint,
|
|
||||||
SlackProjectSyncViewSet,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .importer import (
|
|
||||||
ServiceIssueImportSummaryEndpoint,
|
|
||||||
ImportServiceEndpoint,
|
|
||||||
UpdateServiceImportStatusEndpoint,
|
|
||||||
BulkImportIssuesEndpoint,
|
|
||||||
BulkImportModulesEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .page import (
|
|
||||||
PageViewSet,
|
|
||||||
PageBlockViewSet,
|
|
||||||
PageFavoriteViewSet,
|
|
||||||
CreateIssueFromPageBlockEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .search import GlobalSearchEndpoint, IssueSearchEndpoint
|
|
||||||
|
|
||||||
|
|
||||||
from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint
|
|
||||||
|
|
||||||
from .estimate import (
|
|
||||||
ProjectEstimatePointEndpoint,
|
|
||||||
BulkEstimatePointEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet
|
|
||||||
|
|
||||||
from .analytic import (
|
|
||||||
AnalyticsEndpoint,
|
|
||||||
AnalyticViewViewset,
|
|
||||||
SavedAnalyticEndpoint,
|
|
||||||
ExportAnalyticsEndpoint,
|
|
||||||
DefaultAnalyticsEndpoint,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet
|
|
||||||
|
|
||||||
from .exporter import ExportIssuesEndpoint
|
|
||||||
|
|
||||||
from .config import ConfigurationEndpoint
|
|
||||||
|
@ -1,47 +0,0 @@
|
|||||||
# Python import
|
|
||||||
from uuid import uuid4
|
|
||||||
|
|
||||||
# Third party
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module import
|
|
||||||
from .base import BaseAPIView
|
|
||||||
from plane.db.models import APIToken
|
|
||||||
from plane.api.serializers import APITokenSerializer
|
|
||||||
|
|
||||||
|
|
||||||
class ApiTokenEndpoint(BaseAPIView):
|
|
||||||
def post(self, request):
|
|
||||||
label = request.data.get("label", str(uuid4().hex))
|
|
||||||
workspace = request.data.get("workspace", False)
|
|
||||||
|
|
||||||
if not workspace:
|
|
||||||
return Response(
|
|
||||||
{"error": "Workspace is required"}, status=status.HTTP_200_OK
|
|
||||||
)
|
|
||||||
|
|
||||||
api_token = APIToken.objects.create(
|
|
||||||
label=label, user=request.user, workspace_id=workspace
|
|
||||||
)
|
|
||||||
|
|
||||||
serializer = APITokenSerializer(api_token)
|
|
||||||
# Token will be only vissible while creating
|
|
||||||
return Response(
|
|
||||||
{"api_token": serializer.data, "token": api_token.token},
|
|
||||||
status=status.HTTP_201_CREATED,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def get(self, request):
|
|
||||||
api_tokens = APIToken.objects.filter(user=request.user)
|
|
||||||
serializer = APITokenSerializer(api_tokens, many=True)
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
def delete(self, request, pk):
|
|
||||||
api_token = APIToken.objects.get(pk=pk)
|
|
||||||
api_token.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
@ -1,75 +0,0 @@
|
|||||||
# Third party imports
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.parsers import MultiPartParser, FormParser
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
from django.conf import settings
|
|
||||||
# Module imports
|
|
||||||
from .base import BaseAPIView
|
|
||||||
from plane.db.models import FileAsset, Workspace
|
|
||||||
from plane.api.serializers import FileAssetSerializer
|
|
||||||
|
|
||||||
|
|
||||||
class FileAssetEndpoint(BaseAPIView):
|
|
||||||
parser_classes = (MultiPartParser, FormParser)
|
|
||||||
|
|
||||||
"""
|
|
||||||
A viewset for viewing and editing task instances.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def get(self, request, workspace_id, asset_key):
|
|
||||||
asset_key = str(workspace_id) + "/" + asset_key
|
|
||||||
files = FileAsset.objects.filter(asset=asset_key)
|
|
||||||
if files.exists():
|
|
||||||
serializer = FileAssetSerializer(files, context={"request": request}, many=True)
|
|
||||||
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
|
|
||||||
else:
|
|
||||||
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
def post(self, request, slug):
|
|
||||||
serializer = FileAssetSerializer(data=request.data)
|
|
||||||
if serializer.is_valid():
|
|
||||||
# Get the workspace
|
|
||||||
workspace = Workspace.objects.get(slug=slug)
|
|
||||||
serializer.save(workspace_id=workspace.id)
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
|
|
||||||
def delete(self, request, workspace_id, asset_key):
|
|
||||||
asset_key = str(workspace_id) + "/" + asset_key
|
|
||||||
file_asset = FileAsset.objects.get(asset=asset_key)
|
|
||||||
# Delete the file from storage
|
|
||||||
file_asset.asset.delete(save=False)
|
|
||||||
# Delete the file object
|
|
||||||
file_asset.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
|
|
||||||
class UserAssetsEndpoint(BaseAPIView):
|
|
||||||
parser_classes = (MultiPartParser, FormParser)
|
|
||||||
|
|
||||||
def get(self, request, asset_key):
|
|
||||||
files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
|
|
||||||
if files.exists():
|
|
||||||
serializer = FileAssetSerializer(files, context={"request": request})
|
|
||||||
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
|
|
||||||
else:
|
|
||||||
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
serializer = FileAssetSerializer(data=request.data)
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save()
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
|
|
||||||
def delete(self, request, asset_key):
|
|
||||||
file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
|
|
||||||
# Delete the file from storage
|
|
||||||
file_asset.asset.delete(save=False)
|
|
||||||
# Delete the file object
|
|
||||||
file_asset.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
@ -1,151 +0,0 @@
|
|||||||
## Python imports
|
|
||||||
import jwt
|
|
||||||
|
|
||||||
## Django imports
|
|
||||||
from django.contrib.auth.tokens import PasswordResetTokenGenerator
|
|
||||||
from django.utils.encoding import (
|
|
||||||
smart_str,
|
|
||||||
smart_bytes,
|
|
||||||
DjangoUnicodeDecodeError,
|
|
||||||
)
|
|
||||||
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
|
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
## Third Party Imports
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import permissions
|
|
||||||
from rest_framework_simplejwt.tokens import RefreshToken
|
|
||||||
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
## Module imports
|
|
||||||
from . import BaseAPIView
|
|
||||||
from plane.api.serializers import (
|
|
||||||
ChangePasswordSerializer,
|
|
||||||
ResetPasswordSerializer,
|
|
||||||
)
|
|
||||||
from plane.db.models import User
|
|
||||||
from plane.bgtasks.email_verification_task import email_verification
|
|
||||||
from plane.bgtasks.forgot_password_task import forgot_password
|
|
||||||
|
|
||||||
|
|
||||||
class RequestEmailVerificationEndpoint(BaseAPIView):
|
|
||||||
def get(self, request):
|
|
||||||
token = RefreshToken.for_user(request.user).access_token
|
|
||||||
current_site = settings.WEB_URL
|
|
||||||
email_verification.delay(
|
|
||||||
request.user.first_name, request.user.email, token, current_site
|
|
||||||
)
|
|
||||||
return Response(
|
|
||||||
{"message": "Email sent successfully"}, status=status.HTTP_200_OK
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class VerifyEmailEndpoint(BaseAPIView):
|
|
||||||
def get(self, request):
|
|
||||||
token = request.GET.get("token")
|
|
||||||
try:
|
|
||||||
payload = jwt.decode(token, settings.SECRET_KEY, algorithms="HS256")
|
|
||||||
user = User.objects.get(id=payload["user_id"])
|
|
||||||
|
|
||||||
if not user.is_email_verified:
|
|
||||||
user.is_email_verified = True
|
|
||||||
user.save()
|
|
||||||
return Response(
|
|
||||||
{"email": "Successfully activated"}, status=status.HTTP_200_OK
|
|
||||||
)
|
|
||||||
except jwt.ExpiredSignatureError as _indentifier:
|
|
||||||
return Response(
|
|
||||||
{"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
except jwt.exceptions.DecodeError as _indentifier:
|
|
||||||
return Response(
|
|
||||||
{"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ForgotPasswordEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [permissions.AllowAny]
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
email = request.data.get("email")
|
|
||||||
|
|
||||||
if User.objects.filter(email=email).exists():
|
|
||||||
user = User.objects.get(email=email)
|
|
||||||
uidb64 = urlsafe_base64_encode(smart_bytes(user.id))
|
|
||||||
token = PasswordResetTokenGenerator().make_token(user)
|
|
||||||
|
|
||||||
current_site = settings.WEB_URL
|
|
||||||
|
|
||||||
forgot_password.delay(
|
|
||||||
user.first_name, user.email, uidb64, token, current_site
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
{"message": "Check your email to reset your password"},
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
return Response(
|
|
||||||
{"error": "Please check the email"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ResetPasswordEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [permissions.AllowAny]
|
|
||||||
|
|
||||||
def post(self, request, uidb64, token):
|
|
||||||
try:
|
|
||||||
id = smart_str(urlsafe_base64_decode(uidb64))
|
|
||||||
user = User.objects.get(id=id)
|
|
||||||
if not PasswordResetTokenGenerator().check_token(user, token):
|
|
||||||
return Response(
|
|
||||||
{"error": "token is not valid, please check the new one"},
|
|
||||||
status=status.HTTP_401_UNAUTHORIZED,
|
|
||||||
)
|
|
||||||
serializer = ResetPasswordSerializer(data=request.data)
|
|
||||||
|
|
||||||
if serializer.is_valid():
|
|
||||||
# set_password also hashes the password that the user will get
|
|
||||||
user.set_password(serializer.data.get("new_password"))
|
|
||||||
user.save()
|
|
||||||
response = {
|
|
||||||
"status": "success",
|
|
||||||
"code": status.HTTP_200_OK,
|
|
||||||
"message": "Password updated successfully",
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response(response)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
except DjangoUnicodeDecodeError as indentifier:
|
|
||||||
return Response(
|
|
||||||
{"error": "token is not valid, please check the new one"},
|
|
||||||
status=status.HTTP_401_UNAUTHORIZED,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ChangePasswordEndpoint(BaseAPIView):
|
|
||||||
def post(self, request):
|
|
||||||
serializer = ChangePasswordSerializer(data=request.data)
|
|
||||||
|
|
||||||
user = User.objects.get(pk=request.user.id)
|
|
||||||
if serializer.is_valid():
|
|
||||||
# Check old password
|
|
||||||
if not user.object.check_password(serializer.data.get("old_password")):
|
|
||||||
return Response(
|
|
||||||
{"old_password": ["Wrong password."]},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
# set_password also hashes the password that the user will get
|
|
||||||
self.object.set_password(serializer.data.get("new_password"))
|
|
||||||
self.object.save()
|
|
||||||
response = {
|
|
||||||
"status": "success",
|
|
||||||
"code": status.HTTP_200_OK,
|
|
||||||
"message": "Password updated successfully",
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response(response)
|
|
||||||
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
@ -1,397 +0,0 @@
|
|||||||
# Python imports
|
|
||||||
import uuid
|
|
||||||
import random
|
|
||||||
import string
|
|
||||||
import json
|
|
||||||
import requests
|
|
||||||
|
|
||||||
# Django imports
|
|
||||||
from django.utils import timezone
|
|
||||||
from django.core.exceptions import ValidationError
|
|
||||||
from django.core.validators import validate_email
|
|
||||||
from django.conf import settings
|
|
||||||
from django.contrib.auth.hashers import make_password
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.permissions import AllowAny
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework_simplejwt.tokens import RefreshToken
|
|
||||||
|
|
||||||
from sentry_sdk import capture_exception, capture_message
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from . import BaseAPIView
|
|
||||||
from plane.db.models import User
|
|
||||||
from plane.api.serializers import UserSerializer
|
|
||||||
from plane.settings.redis import redis_instance
|
|
||||||
from plane.bgtasks.magic_link_code_task import magic_link
|
|
||||||
|
|
||||||
|
|
||||||
def get_tokens_for_user(user):
|
|
||||||
refresh = RefreshToken.for_user(user)
|
|
||||||
return (
|
|
||||||
str(refresh.access_token),
|
|
||||||
str(refresh),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class SignUpEndpoint(BaseAPIView):
|
|
||||||
permission_classes = (AllowAny,)
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
if not settings.ENABLE_SIGNUP:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "New account creation is disabled. Please contact your site administrator"
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
email = request.data.get("email", False)
|
|
||||||
password = request.data.get("password", False)
|
|
||||||
|
|
||||||
## Raise exception if any of the above are missing
|
|
||||||
if not email or not password:
|
|
||||||
return Response(
|
|
||||||
{"error": "Both email and password are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
email = email.strip().lower()
|
|
||||||
|
|
||||||
try:
|
|
||||||
validate_email(email)
|
|
||||||
except ValidationError as e:
|
|
||||||
return Response(
|
|
||||||
{"error": "Please provide a valid email address."},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check if the user already exists
|
|
||||||
if User.objects.filter(email=email).exists():
|
|
||||||
return Response(
|
|
||||||
{"error": "User with this email already exists"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
user = User.objects.create(email=email, username=uuid.uuid4().hex)
|
|
||||||
user.set_password(password)
|
|
||||||
|
|
||||||
# settings last actives for the user
|
|
||||||
user.last_active = timezone.now()
|
|
||||||
user.last_login_time = timezone.now()
|
|
||||||
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
|
||||||
user.token_updated_at = timezone.now()
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
access_token, refresh_token = get_tokens_for_user(user)
|
|
||||||
|
|
||||||
data = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"refresh_token": refresh_token,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Send Analytics
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": "email",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_UP",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class SignInEndpoint(BaseAPIView):
|
|
||||||
permission_classes = (AllowAny,)
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
email = request.data.get("email", False)
|
|
||||||
password = request.data.get("password", False)
|
|
||||||
|
|
||||||
## Raise exception if any of the above are missing
|
|
||||||
if not email or not password:
|
|
||||||
return Response(
|
|
||||||
{"error": "Both email and password are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
email = email.strip().lower()
|
|
||||||
|
|
||||||
try:
|
|
||||||
validate_email(email)
|
|
||||||
except ValidationError as e:
|
|
||||||
return Response(
|
|
||||||
{"error": "Please provide a valid email address."},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
user = User.objects.filter(email=email).first()
|
|
||||||
|
|
||||||
if user is None:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Sign up Process
|
|
||||||
if not user.check_password(password):
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
if not user.is_active:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Your account has been deactivated. Please contact your site administrator."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
|
|
||||||
# settings last active for the user
|
|
||||||
user.last_active = timezone.now()
|
|
||||||
user.last_login_time = timezone.now()
|
|
||||||
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
|
||||||
user.token_updated_at = timezone.now()
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
access_token, refresh_token = get_tokens_for_user(user)
|
|
||||||
# Send Analytics
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": "email",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_IN",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
data = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"refresh_token": refresh_token,
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response(data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class SignOutEndpoint(BaseAPIView):
|
|
||||||
def post(self, request):
|
|
||||||
refresh_token = request.data.get("refresh_token", False)
|
|
||||||
|
|
||||||
if not refresh_token:
|
|
||||||
capture_message("No refresh token provided")
|
|
||||||
return Response(
|
|
||||||
{"error": "No refresh token provided"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
user = User.objects.get(pk=request.user.id)
|
|
||||||
|
|
||||||
user.last_logout_time = timezone.now()
|
|
||||||
user.last_logout_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
token = RefreshToken(refresh_token)
|
|
||||||
token.blacklist()
|
|
||||||
return Response({"message": "success"}, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class MagicSignInGenerateEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [
|
|
||||||
AllowAny,
|
|
||||||
]
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
email = request.data.get("email", False)
|
|
||||||
|
|
||||||
if not email:
|
|
||||||
return Response(
|
|
||||||
{"error": "Please provide a valid email address"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Clean up
|
|
||||||
email = email.strip().lower()
|
|
||||||
validate_email(email)
|
|
||||||
|
|
||||||
## Generate a random token
|
|
||||||
token = (
|
|
||||||
"".join(random.choices(string.ascii_lowercase, k=4))
|
|
||||||
+ "-"
|
|
||||||
+ "".join(random.choices(string.ascii_lowercase, k=4))
|
|
||||||
+ "-"
|
|
||||||
+ "".join(random.choices(string.ascii_lowercase, k=4))
|
|
||||||
)
|
|
||||||
|
|
||||||
ri = redis_instance()
|
|
||||||
|
|
||||||
key = "magic_" + str(email)
|
|
||||||
|
|
||||||
# Check if the key already exists in python
|
|
||||||
if ri.exists(key):
|
|
||||||
data = json.loads(ri.get(key))
|
|
||||||
|
|
||||||
current_attempt = data["current_attempt"] + 1
|
|
||||||
|
|
||||||
if data["current_attempt"] > 2:
|
|
||||||
return Response(
|
|
||||||
{"error": "Max attempts exhausted. Please try again later."},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
value = {
|
|
||||||
"current_attempt": current_attempt,
|
|
||||||
"email": email,
|
|
||||||
"token": token,
|
|
||||||
}
|
|
||||||
expiry = 600
|
|
||||||
|
|
||||||
ri.set(key, json.dumps(value), ex=expiry)
|
|
||||||
|
|
||||||
else:
|
|
||||||
value = {"current_attempt": 0, "email": email, "token": token}
|
|
||||||
expiry = 600
|
|
||||||
|
|
||||||
ri.set(key, json.dumps(value), ex=expiry)
|
|
||||||
|
|
||||||
current_site = settings.WEB_URL
|
|
||||||
magic_link.delay(email, key, token, current_site)
|
|
||||||
|
|
||||||
return Response({"key": key}, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class MagicSignInEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [
|
|
||||||
AllowAny,
|
|
||||||
]
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
user_token = request.data.get("token", "").strip()
|
|
||||||
key = request.data.get("key", False).strip().lower()
|
|
||||||
|
|
||||||
if not key or user_token == "":
|
|
||||||
return Response(
|
|
||||||
{"error": "User token and key are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
ri = redis_instance()
|
|
||||||
|
|
||||||
if ri.exists(key):
|
|
||||||
data = json.loads(ri.get(key))
|
|
||||||
|
|
||||||
token = data["token"]
|
|
||||||
email = data["email"]
|
|
||||||
|
|
||||||
if str(token) == str(user_token):
|
|
||||||
if User.objects.filter(email=email).exists():
|
|
||||||
user = User.objects.get(email=email)
|
|
||||||
# Send event to Jitsu for tracking
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": "code",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_IN",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
user = User.objects.create(
|
|
||||||
email=email,
|
|
||||||
username=uuid.uuid4().hex,
|
|
||||||
password=make_password(uuid.uuid4().hex),
|
|
||||||
is_password_autoset=True,
|
|
||||||
)
|
|
||||||
# Send event to Jitsu for tracking
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": "code",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_UP",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
user.last_active = timezone.now()
|
|
||||||
user.last_login_time = timezone.now()
|
|
||||||
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
|
||||||
user.token_updated_at = timezone.now()
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
access_token, refresh_token = get_tokens_for_user(user)
|
|
||||||
data = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"refresh_token": refresh_token,
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response(data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
else:
|
|
||||||
return Response(
|
|
||||||
{"error": "Your login code was incorrect. Please try again."},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
return Response(
|
|
||||||
{"error": "The magic code/link has expired please try again"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
@ -1,26 +1,26 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
import zoneinfo
|
import zoneinfo
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.urls import resolve
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.utils import timezone
|
|
||||||
from django.db import IntegrityError
|
|
||||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||||
|
from django.db import IntegrityError
|
||||||
# Third part imports
|
from django.urls import resolve
|
||||||
|
from django.utils import timezone
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.viewsets import ModelViewSet
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.exceptions import APIException
|
|
||||||
from rest_framework.views import APIView
|
|
||||||
from rest_framework.filters import SearchFilter
|
|
||||||
from rest_framework.permissions import IsAuthenticated
|
from rest_framework.permissions import IsAuthenticated
|
||||||
from sentry_sdk import capture_exception
|
from rest_framework.response import Response
|
||||||
from django_filters.rest_framework import DjangoFilterBackend
|
|
||||||
|
# Third party imports
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
|
from plane.api.middleware.api_authentication import APIKeyAuthentication
|
||||||
|
from plane.api.rate_limit import ApiKeyRateThrottle
|
||||||
|
from plane.bgtasks.webhook_task import send_webhook
|
||||||
|
from plane.utils.exception_logger import log_exception
|
||||||
from plane.utils.paginator import BasePaginator
|
from plane.utils.paginator import BasePaginator
|
||||||
|
|
||||||
|
|
||||||
@ -29,6 +29,7 @@ class TimezoneMixin:
|
|||||||
This enables timezone conversion according
|
This enables timezone conversion according
|
||||||
to the user set timezone
|
to the user set timezone
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def initial(self, request, *args, **kwargs):
|
def initial(self, request, *args, **kwargs):
|
||||||
super().initial(request, *args, **kwargs)
|
super().initial(request, *args, **kwargs)
|
||||||
if request.user.is_authenticated:
|
if request.user.is_authenticated:
|
||||||
@ -37,29 +38,57 @@ class TimezoneMixin:
|
|||||||
timezone.deactivate()
|
timezone.deactivate()
|
||||||
|
|
||||||
|
|
||||||
class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
|
class WebhookMixin:
|
||||||
|
webhook_event = None
|
||||||
|
bulk = False
|
||||||
|
|
||||||
model = None
|
def finalize_response(self, request, response, *args, **kwargs):
|
||||||
|
response = super().finalize_response(
|
||||||
|
request, response, *args, **kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check for the case should webhook be sent
|
||||||
|
if (
|
||||||
|
self.webhook_event
|
||||||
|
and self.request.method in ["POST", "PATCH", "DELETE"]
|
||||||
|
and response.status_code in [200, 201, 204]
|
||||||
|
):
|
||||||
|
url = request.build_absolute_uri()
|
||||||
|
parsed_url = urlparse(url)
|
||||||
|
# Extract the scheme and netloc
|
||||||
|
scheme = parsed_url.scheme
|
||||||
|
netloc = parsed_url.netloc
|
||||||
|
# Push the object to delay
|
||||||
|
send_webhook.delay(
|
||||||
|
event=self.webhook_event,
|
||||||
|
payload=response.data,
|
||||||
|
kw=self.kwargs,
|
||||||
|
action=self.request.method,
|
||||||
|
slug=self.workspace_slug,
|
||||||
|
bulk=self.bulk,
|
||||||
|
current_site=f"{scheme}://{netloc}",
|
||||||
|
)
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
||||||
|
authentication_classes = [
|
||||||
|
APIKeyAuthentication,
|
||||||
|
]
|
||||||
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
IsAuthenticated,
|
IsAuthenticated,
|
||||||
]
|
]
|
||||||
|
|
||||||
filter_backends = (
|
throttle_classes = [
|
||||||
DjangoFilterBackend,
|
ApiKeyRateThrottle,
|
||||||
SearchFilter,
|
]
|
||||||
)
|
|
||||||
|
|
||||||
filterset_fields = []
|
def filter_queryset(self, queryset):
|
||||||
|
for backend in list(self.filter_backends):
|
||||||
search_fields = []
|
queryset = backend().filter_queryset(self.request, queryset, self)
|
||||||
|
return queryset
|
||||||
def get_queryset(self):
|
|
||||||
try:
|
|
||||||
return self.model.objects.all()
|
|
||||||
except Exception as e:
|
|
||||||
capture_exception(e)
|
|
||||||
raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def handle_exception(self, exc):
|
def handle_exception(self, exc):
|
||||||
"""
|
"""
|
||||||
@ -71,28 +100,38 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
|
|||||||
return response
|
return response
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if isinstance(e, IntegrityError):
|
if isinstance(e, IntegrityError):
|
||||||
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
|
return Response(
|
||||||
|
{"error": "The payload is not valid"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
if isinstance(e, ValidationError):
|
if isinstance(e, ValidationError):
|
||||||
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
|
return Response(
|
||||||
|
{"error": "Please provide valid detail"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
if isinstance(e, ObjectDoesNotExist):
|
if isinstance(e, ObjectDoesNotExist):
|
||||||
model_name = str(exc).split(" matching query does not exist.")[0]
|
return Response(
|
||||||
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
|
{"error": "The requested resource does not exist."},
|
||||||
|
status=status.HTTP_404_NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
if isinstance(e, KeyError):
|
if isinstance(e, KeyError):
|
||||||
capture_exception(e)
|
return Response(
|
||||||
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
|
{"error": "The required key does not exist."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
print(e) if settings.DEBUG else print("Server Error")
|
)
|
||||||
capture_exception(e)
|
|
||||||
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
|
||||||
|
|
||||||
|
log_exception(e)
|
||||||
|
return Response(
|
||||||
|
{"error": "Something went wrong please try again later"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
def dispatch(self, request, *args, **kwargs):
|
def dispatch(self, request, *args, **kwargs):
|
||||||
try:
|
try:
|
||||||
response = super().dispatch(request, *args, **kwargs)
|
response = super().dispatch(request, *args, **kwargs)
|
||||||
|
|
||||||
if settings.DEBUG:
|
if settings.DEBUG:
|
||||||
from django.db import connection
|
from django.db import connection
|
||||||
|
|
||||||
@ -100,11 +139,27 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
|
|||||||
f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
|
f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
|
||||||
)
|
)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
response = self.handle_exception(exc)
|
response = self.handle_exception(exc)
|
||||||
return exc
|
return exc
|
||||||
|
|
||||||
|
def finalize_response(self, request, response, *args, **kwargs):
|
||||||
|
# Call super to get the default response
|
||||||
|
response = super().finalize_response(
|
||||||
|
request, response, *args, **kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add custom headers if they exist in the request META
|
||||||
|
ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
|
||||||
|
if ratelimit_remaining is not None:
|
||||||
|
response["X-RateLimit-Remaining"] = ratelimit_remaining
|
||||||
|
|
||||||
|
ratelimit_reset = request.META.get("X-RateLimit-Reset")
|
||||||
|
if ratelimit_reset is not None:
|
||||||
|
response["X-RateLimit-Reset"] = ratelimit_reset
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def workspace_slug(self):
|
def workspace_slug(self):
|
||||||
return self.kwargs.get("slug", None)
|
return self.kwargs.get("slug", None)
|
||||||
@ -118,75 +173,20 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
|
|||||||
if resolve(self.request.path_info).url_name == "project":
|
if resolve(self.request.path_info).url_name == "project":
|
||||||
return self.kwargs.get("pk", None)
|
return self.kwargs.get("pk", None)
|
||||||
|
|
||||||
|
@property
|
||||||
class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
def fields(self):
|
||||||
|
fields = [
|
||||||
permission_classes = [
|
field
|
||||||
IsAuthenticated,
|
for field in self.request.GET.get("fields", "").split(",")
|
||||||
]
|
if field
|
||||||
|
]
|
||||||
filter_backends = (
|
return fields if fields else None
|
||||||
DjangoFilterBackend,
|
|
||||||
SearchFilter,
|
|
||||||
)
|
|
||||||
|
|
||||||
filterset_fields = []
|
|
||||||
|
|
||||||
search_fields = []
|
|
||||||
|
|
||||||
def filter_queryset(self, queryset):
|
|
||||||
for backend in list(self.filter_backends):
|
|
||||||
queryset = backend().filter_queryset(self.request, queryset, self)
|
|
||||||
return queryset
|
|
||||||
|
|
||||||
|
|
||||||
def handle_exception(self, exc):
|
|
||||||
"""
|
|
||||||
Handle any exception that occurs, by returning an appropriate response,
|
|
||||||
or re-raising the error.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
response = super().handle_exception(exc)
|
|
||||||
return response
|
|
||||||
except Exception as e:
|
|
||||||
if isinstance(e, IntegrityError):
|
|
||||||
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
if isinstance(e, ValidationError):
|
|
||||||
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
if isinstance(e, ObjectDoesNotExist):
|
|
||||||
model_name = str(exc).split(" matching query does not exist.")[0]
|
|
||||||
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
|
|
||||||
|
|
||||||
if isinstance(e, KeyError):
|
|
||||||
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
print(e) if settings.DEBUG else print("Server Error")
|
|
||||||
capture_exception(e)
|
|
||||||
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
|
||||||
|
|
||||||
|
|
||||||
def dispatch(self, request, *args, **kwargs):
|
|
||||||
try:
|
|
||||||
response = super().dispatch(request, *args, **kwargs)
|
|
||||||
|
|
||||||
if settings.DEBUG:
|
|
||||||
from django.db import connection
|
|
||||||
|
|
||||||
print(
|
|
||||||
f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
|
|
||||||
)
|
|
||||||
return response
|
|
||||||
|
|
||||||
except Exception as exc:
|
|
||||||
response = self.handle_exception(exc)
|
|
||||||
return exc
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def workspace_slug(self):
|
def expand(self):
|
||||||
return self.kwargs.get("slug", None)
|
expand = [
|
||||||
|
expand
|
||||||
@property
|
for expand in self.request.GET.get("expand", "").split(",")
|
||||||
def project_id(self):
|
if expand
|
||||||
return self.kwargs.get("project_id", None)
|
]
|
||||||
|
return expand if expand else None
|
||||||
|
@ -1,34 +0,0 @@
|
|||||||
# Python imports
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Django imports
|
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework.permissions import AllowAny
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from .base import BaseAPIView
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigurationEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [
|
|
||||||
AllowAny,
|
|
||||||
]
|
|
||||||
|
|
||||||
def get(self, request):
|
|
||||||
data = {}
|
|
||||||
data["google_client_id"] = os.environ.get("GOOGLE_CLIENT_ID", None)
|
|
||||||
data["github_client_id"] = os.environ.get("GITHUB_CLIENT_ID", None)
|
|
||||||
data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
|
|
||||||
data["magic_login"] = (
|
|
||||||
bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
|
|
||||||
) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
|
|
||||||
data["email_password_login"] = (
|
|
||||||
os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
|
|
||||||
)
|
|
||||||
data["slack_client_id"] = os.environ.get("SLACK_CLIENT_ID", None)
|
|
||||||
return Response(data, status=status.HTTP_200_OK)
|
|
File diff suppressed because it is too large
Load Diff
@ -1,526 +0,0 @@
|
|||||||
# Python imports
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Django imports
|
|
||||||
from django.db.models import Max, Q
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.api.views import BaseAPIView
|
|
||||||
from plane.db.models import (
|
|
||||||
WorkspaceIntegration,
|
|
||||||
Importer,
|
|
||||||
APIToken,
|
|
||||||
Project,
|
|
||||||
State,
|
|
||||||
IssueSequence,
|
|
||||||
Issue,
|
|
||||||
IssueActivity,
|
|
||||||
IssueComment,
|
|
||||||
IssueLink,
|
|
||||||
IssueLabel,
|
|
||||||
Workspace,
|
|
||||||
IssueAssignee,
|
|
||||||
Module,
|
|
||||||
ModuleLink,
|
|
||||||
ModuleIssue,
|
|
||||||
Label,
|
|
||||||
)
|
|
||||||
from plane.api.serializers import (
|
|
||||||
ImporterSerializer,
|
|
||||||
IssueFlatSerializer,
|
|
||||||
ModuleSerializer,
|
|
||||||
)
|
|
||||||
from plane.utils.integrations.github import get_github_repo_details
|
|
||||||
from plane.utils.importers.jira import jira_project_issue_summary
|
|
||||||
from plane.bgtasks.importer_task import service_importer
|
|
||||||
from plane.utils.html_processor import strip_tags
|
|
||||||
from plane.api.permissions import WorkSpaceAdminPermission
|
|
||||||
|
|
||||||
|
|
||||||
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
|
|
||||||
|
|
||||||
def get(self, request, slug, service):
|
|
||||||
if service == "github":
|
|
||||||
owner = request.GET.get("owner", False)
|
|
||||||
repo = request.GET.get("repo", False)
|
|
||||||
|
|
||||||
if not owner or not repo:
|
|
||||||
return Response(
|
|
||||||
{"error": "Owner and repo are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
workspace_integration = WorkspaceIntegration.objects.get(
|
|
||||||
integration__provider="github", workspace__slug=slug
|
|
||||||
)
|
|
||||||
|
|
||||||
access_tokens_url = workspace_integration.metadata.get(
|
|
||||||
"access_tokens_url", False
|
|
||||||
)
|
|
||||||
|
|
||||||
if not access_tokens_url:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
issue_count, labels, collaborators = get_github_repo_details(
|
|
||||||
access_tokens_url, owner, repo
|
|
||||||
)
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"issue_count": issue_count,
|
|
||||||
"labels": labels,
|
|
||||||
"collaborators": collaborators,
|
|
||||||
},
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
if service == "jira":
|
|
||||||
# Check for all the keys
|
|
||||||
params = {
|
|
||||||
"project_key": "Project key is required",
|
|
||||||
"api_token": "API token is required",
|
|
||||||
"email": "Email is required",
|
|
||||||
"cloud_hostname": "Cloud hostname is required",
|
|
||||||
}
|
|
||||||
|
|
||||||
for key, error_message in params.items():
|
|
||||||
if not request.GET.get(key, False):
|
|
||||||
return Response(
|
|
||||||
{"error": error_message}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
|
|
||||||
project_key = request.GET.get("project_key", "")
|
|
||||||
api_token = request.GET.get("api_token", "")
|
|
||||||
email = request.GET.get("email", "")
|
|
||||||
cloud_hostname = request.GET.get("cloud_hostname", "")
|
|
||||||
|
|
||||||
response = jira_project_issue_summary(
|
|
||||||
email, api_token, project_key, cloud_hostname
|
|
||||||
)
|
|
||||||
if "error" in response:
|
|
||||||
return Response(response, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
else:
|
|
||||||
return Response(
|
|
||||||
response,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
return Response(
|
|
||||||
{"error": "Service not supported yet"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ImportServiceEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [
|
|
||||||
WorkSpaceAdminPermission,
|
|
||||||
]
|
|
||||||
def post(self, request, slug, service):
|
|
||||||
project_id = request.data.get("project_id", False)
|
|
||||||
|
|
||||||
if not project_id:
|
|
||||||
return Response(
|
|
||||||
{"error": "Project ID is required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
workspace = Workspace.objects.get(slug=slug)
|
|
||||||
|
|
||||||
if service == "github":
|
|
||||||
data = request.data.get("data", False)
|
|
||||||
metadata = request.data.get("metadata", False)
|
|
||||||
config = request.data.get("config", False)
|
|
||||||
if not data or not metadata or not config:
|
|
||||||
return Response(
|
|
||||||
{"error": "Data, config and metadata are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
api_token = APIToken.objects.filter(
|
|
||||||
user=request.user, workspace=workspace
|
|
||||||
).first()
|
|
||||||
if api_token is None:
|
|
||||||
api_token = APIToken.objects.create(
|
|
||||||
user=request.user,
|
|
||||||
label="Importer",
|
|
||||||
workspace=workspace,
|
|
||||||
)
|
|
||||||
|
|
||||||
importer = Importer.objects.create(
|
|
||||||
service=service,
|
|
||||||
project_id=project_id,
|
|
||||||
status="queued",
|
|
||||||
initiated_by=request.user,
|
|
||||||
data=data,
|
|
||||||
metadata=metadata,
|
|
||||||
token=api_token,
|
|
||||||
config=config,
|
|
||||||
created_by=request.user,
|
|
||||||
updated_by=request.user,
|
|
||||||
)
|
|
||||||
|
|
||||||
service_importer.delay(service, importer.id)
|
|
||||||
serializer = ImporterSerializer(importer)
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
|
|
||||||
if service == "jira":
|
|
||||||
data = request.data.get("data", False)
|
|
||||||
metadata = request.data.get("metadata", False)
|
|
||||||
config = request.data.get("config", False)
|
|
||||||
if not data or not metadata:
|
|
||||||
return Response(
|
|
||||||
{"error": "Data, config and metadata are required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
api_token = APIToken.objects.filter(
|
|
||||||
user=request.user, workspace=workspace
|
|
||||||
).first()
|
|
||||||
if api_token is None:
|
|
||||||
api_token = APIToken.objects.create(
|
|
||||||
user=request.user,
|
|
||||||
label="Importer",
|
|
||||||
workspace=workspace,
|
|
||||||
)
|
|
||||||
|
|
||||||
importer = Importer.objects.create(
|
|
||||||
service=service,
|
|
||||||
project_id=project_id,
|
|
||||||
status="queued",
|
|
||||||
initiated_by=request.user,
|
|
||||||
data=data,
|
|
||||||
metadata=metadata,
|
|
||||||
token=api_token,
|
|
||||||
config=config,
|
|
||||||
created_by=request.user,
|
|
||||||
updated_by=request.user,
|
|
||||||
)
|
|
||||||
|
|
||||||
service_importer.delay(service, importer.id)
|
|
||||||
serializer = ImporterSerializer(importer)
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
{"error": "Servivce not supported yet"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
def get(self, request, slug):
|
|
||||||
imports = (
|
|
||||||
Importer.objects.filter(workspace__slug=slug)
|
|
||||||
.order_by("-created_at")
|
|
||||||
.select_related("initiated_by", "project", "workspace")
|
|
||||||
)
|
|
||||||
serializer = ImporterSerializer(imports, many=True)
|
|
||||||
return Response(serializer.data)
|
|
||||||
|
|
||||||
def delete(self, request, slug, service, pk):
|
|
||||||
importer = Importer.objects.get(
|
|
||||||
pk=pk, service=service, workspace__slug=slug
|
|
||||||
)
|
|
||||||
|
|
||||||
if importer.imported_data is not None:
|
|
||||||
# Delete all imported Issues
|
|
||||||
imported_issues = importer.imported_data.get("issues", [])
|
|
||||||
Issue.issue_objects.filter(id__in=imported_issues).delete()
|
|
||||||
|
|
||||||
# Delete all imported Labels
|
|
||||||
imported_labels = importer.imported_data.get("labels", [])
|
|
||||||
Label.objects.filter(id__in=imported_labels).delete()
|
|
||||||
|
|
||||||
if importer.service == "jira":
|
|
||||||
imported_modules = importer.imported_data.get("modules", [])
|
|
||||||
Module.objects.filter(id__in=imported_modules).delete()
|
|
||||||
importer.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
def patch(self, request, slug, service, pk):
|
|
||||||
importer = Importer.objects.get(
|
|
||||||
pk=pk, service=service, workspace__slug=slug
|
|
||||||
)
|
|
||||||
serializer = ImporterSerializer(importer, data=request.data, partial=True)
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save()
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateServiceImportStatusEndpoint(BaseAPIView):
|
|
||||||
def post(self, request, slug, project_id, service, importer_id):
|
|
||||||
importer = Importer.objects.get(
|
|
||||||
pk=importer_id,
|
|
||||||
workspace__slug=slug,
|
|
||||||
project_id=project_id,
|
|
||||||
service=service,
|
|
||||||
)
|
|
||||||
importer.status = request.data.get("status", "processing")
|
|
||||||
importer.save()
|
|
||||||
return Response(status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class BulkImportIssuesEndpoint(BaseAPIView):
|
|
||||||
def post(self, request, slug, project_id, service):
|
|
||||||
# Get the project
|
|
||||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
|
||||||
|
|
||||||
# Get the default state
|
|
||||||
default_state = State.objects.filter(
|
|
||||||
~Q(name="Triage"), project_id=project_id, default=True
|
|
||||||
).first()
|
|
||||||
# if there is no default state assign any random state
|
|
||||||
if default_state is None:
|
|
||||||
default_state = State.objects.filter(
|
|
||||||
~Q(name="Triage"), project_id=project_id
|
|
||||||
).first()
|
|
||||||
|
|
||||||
# Get the maximum sequence_id
|
|
||||||
last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
|
|
||||||
largest=Max("sequence")
|
|
||||||
)["largest"]
|
|
||||||
|
|
||||||
last_id = 1 if last_id is None else last_id + 1
|
|
||||||
|
|
||||||
# Get the maximum sort order
|
|
||||||
largest_sort_order = Issue.objects.filter(
|
|
||||||
project_id=project_id, state=default_state
|
|
||||||
).aggregate(largest=Max("sort_order"))["largest"]
|
|
||||||
|
|
||||||
largest_sort_order = (
|
|
||||||
65535 if largest_sort_order is None else largest_sort_order + 10000
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get the issues_data
|
|
||||||
issues_data = request.data.get("issues_data", [])
|
|
||||||
|
|
||||||
if not len(issues_data):
|
|
||||||
return Response(
|
|
||||||
{"error": "Issue data is required"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Issues
|
|
||||||
bulk_issues = []
|
|
||||||
for issue_data in issues_data:
|
|
||||||
bulk_issues.append(
|
|
||||||
Issue(
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
state_id=issue_data.get("state")
|
|
||||||
if issue_data.get("state", False)
|
|
||||||
else default_state.id,
|
|
||||||
name=issue_data.get("name", "Issue Created through Bulk"),
|
|
||||||
description_html=issue_data.get("description_html", "<p></p>"),
|
|
||||||
description_stripped=(
|
|
||||||
None
|
|
||||||
if (
|
|
||||||
issue_data.get("description_html") == ""
|
|
||||||
or issue_data.get("description_html") is None
|
|
||||||
)
|
|
||||||
else strip_tags(issue_data.get("description_html"))
|
|
||||||
),
|
|
||||||
sequence_id=last_id,
|
|
||||||
sort_order=largest_sort_order,
|
|
||||||
start_date=issue_data.get("start_date", None),
|
|
||||||
target_date=issue_data.get("target_date", None),
|
|
||||||
priority=issue_data.get("priority", "none"),
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
largest_sort_order = largest_sort_order + 10000
|
|
||||||
last_id = last_id + 1
|
|
||||||
|
|
||||||
issues = Issue.objects.bulk_create(
|
|
||||||
bulk_issues,
|
|
||||||
batch_size=100,
|
|
||||||
ignore_conflicts=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Sequences
|
|
||||||
_ = IssueSequence.objects.bulk_create(
|
|
||||||
[
|
|
||||||
IssueSequence(
|
|
||||||
issue=issue,
|
|
||||||
sequence=issue.sequence_id,
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
)
|
|
||||||
for issue in issues
|
|
||||||
],
|
|
||||||
batch_size=100,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Attach Labels
|
|
||||||
bulk_issue_labels = []
|
|
||||||
for issue, issue_data in zip(issues, issues_data):
|
|
||||||
labels_list = issue_data.get("labels_list", [])
|
|
||||||
bulk_issue_labels = bulk_issue_labels + [
|
|
||||||
IssueLabel(
|
|
||||||
issue=issue,
|
|
||||||
label_id=label_id,
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for label_id in labels_list
|
|
||||||
]
|
|
||||||
|
|
||||||
_ = IssueLabel.objects.bulk_create(
|
|
||||||
bulk_issue_labels, batch_size=100, ignore_conflicts=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# Attach Assignees
|
|
||||||
bulk_issue_assignees = []
|
|
||||||
for issue, issue_data in zip(issues, issues_data):
|
|
||||||
assignees_list = issue_data.get("assignees_list", [])
|
|
||||||
bulk_issue_assignees = bulk_issue_assignees + [
|
|
||||||
IssueAssignee(
|
|
||||||
issue=issue,
|
|
||||||
assignee_id=assignee_id,
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for assignee_id in assignees_list
|
|
||||||
]
|
|
||||||
|
|
||||||
_ = IssueAssignee.objects.bulk_create(
|
|
||||||
bulk_issue_assignees, batch_size=100, ignore_conflicts=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# Track the issue activities
|
|
||||||
IssueActivity.objects.bulk_create(
|
|
||||||
[
|
|
||||||
IssueActivity(
|
|
||||||
issue=issue,
|
|
||||||
actor=request.user,
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
comment=f"imported the issue from {service}",
|
|
||||||
verb="created",
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for issue in issues
|
|
||||||
],
|
|
||||||
batch_size=100,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create Comments
|
|
||||||
bulk_issue_comments = []
|
|
||||||
for issue, issue_data in zip(issues, issues_data):
|
|
||||||
comments_list = issue_data.get("comments_list", [])
|
|
||||||
bulk_issue_comments = bulk_issue_comments + [
|
|
||||||
IssueComment(
|
|
||||||
issue=issue,
|
|
||||||
comment_html=comment.get("comment_html", "<p></p>"),
|
|
||||||
actor=request.user,
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for comment in comments_list
|
|
||||||
]
|
|
||||||
|
|
||||||
_ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)
|
|
||||||
|
|
||||||
# Attach Links
|
|
||||||
_ = IssueLink.objects.bulk_create(
|
|
||||||
[
|
|
||||||
IssueLink(
|
|
||||||
issue=issue,
|
|
||||||
url=issue_data.get("link", {}).get("url", "https://github.com"),
|
|
||||||
title=issue_data.get("link", {}).get("title", "Original Issue"),
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for issue, issue_data in zip(issues, issues_data)
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
{"issues": IssueFlatSerializer(issues, many=True).data},
|
|
||||||
status=status.HTTP_201_CREATED,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class BulkImportModulesEndpoint(BaseAPIView):
|
|
||||||
def post(self, request, slug, project_id, service):
|
|
||||||
modules_data = request.data.get("modules_data", [])
|
|
||||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
|
||||||
|
|
||||||
modules = Module.objects.bulk_create(
|
|
||||||
[
|
|
||||||
Module(
|
|
||||||
name=module.get("name", uuid.uuid4().hex),
|
|
||||||
description=module.get("description", ""),
|
|
||||||
start_date=module.get("start_date", None),
|
|
||||||
target_date=module.get("target_date", None),
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for module in modules_data
|
|
||||||
],
|
|
||||||
batch_size=100,
|
|
||||||
ignore_conflicts=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
modules = Module.objects.filter(id__in=[module.id for module in modules])
|
|
||||||
|
|
||||||
if len(modules) == len(modules_data):
|
|
||||||
_ = ModuleLink.objects.bulk_create(
|
|
||||||
[
|
|
||||||
ModuleLink(
|
|
||||||
module=module,
|
|
||||||
url=module_data.get("link", {}).get(
|
|
||||||
"url", "https://plane.so"
|
|
||||||
),
|
|
||||||
title=module_data.get("link", {}).get(
|
|
||||||
"title", "Original Issue"
|
|
||||||
),
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for module, module_data in zip(modules, modules_data)
|
|
||||||
],
|
|
||||||
batch_size=100,
|
|
||||||
ignore_conflicts=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
bulk_module_issues = []
|
|
||||||
for module, module_data in zip(modules, modules_data):
|
|
||||||
module_issues_list = module_data.get("module_issues_list", [])
|
|
||||||
bulk_module_issues = bulk_module_issues + [
|
|
||||||
ModuleIssue(
|
|
||||||
issue_id=issue,
|
|
||||||
module=module,
|
|
||||||
project_id=project_id,
|
|
||||||
workspace_id=project.workspace_id,
|
|
||||||
created_by=request.user,
|
|
||||||
)
|
|
||||||
for issue in module_issues_list
|
|
||||||
]
|
|
||||||
|
|
||||||
_ = ModuleIssue.objects.bulk_create(
|
|
||||||
bulk_module_issues, batch_size=100, ignore_conflicts=True
|
|
||||||
)
|
|
||||||
|
|
||||||
serializer = ModuleSerializer(modules, many=True)
|
|
||||||
return Response(
|
|
||||||
{"modules": serializer.data}, status=status.HTTP_201_CREATED
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
return Response(
|
|
||||||
{"message": "Modules created but issues could not be imported"},
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
@ -1,83 +1,38 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
import json
|
import json
|
||||||
|
|
||||||
# Django import
|
# Django improts
|
||||||
from django.utils import timezone
|
|
||||||
from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
|
|
||||||
from django.core.serializers.json import DjangoJSONEncoder
|
from django.core.serializers.json import DjangoJSONEncoder
|
||||||
|
from django.db.models import Q
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseViewSet
|
from plane.api.serializers import InboxIssueSerializer, IssueSerializer
|
||||||
from plane.api.permissions import ProjectBasePermission, ProjectLitePermission
|
from plane.app.permissions import ProjectLitePermission
|
||||||
|
from plane.bgtasks.issue_activites_task import issue_activity
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
Inbox,
|
Inbox,
|
||||||
InboxIssue,
|
InboxIssue,
|
||||||
Issue,
|
Issue,
|
||||||
State,
|
Project,
|
||||||
IssueLink,
|
|
||||||
IssueAttachment,
|
|
||||||
ProjectMember,
|
ProjectMember,
|
||||||
ProjectDeployBoard,
|
State,
|
||||||
)
|
)
|
||||||
from plane.api.serializers import (
|
|
||||||
IssueSerializer,
|
from .base import BaseAPIView
|
||||||
InboxSerializer,
|
|
||||||
InboxIssueSerializer,
|
|
||||||
IssueCreateSerializer,
|
|
||||||
IssueStateInboxSerializer,
|
|
||||||
)
|
|
||||||
from plane.utils.issue_filters import issue_filters
|
|
||||||
from plane.bgtasks.issue_activites_task import issue_activity
|
|
||||||
|
|
||||||
|
|
||||||
class InboxViewSet(BaseViewSet):
|
class InboxIssueAPIEndpoint(BaseAPIView):
|
||||||
permission_classes = [
|
"""
|
||||||
ProjectBasePermission,
|
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||||
]
|
`update` and `destroy` actions related to inbox issues.
|
||||||
|
|
||||||
serializer_class = InboxSerializer
|
"""
|
||||||
model = Inbox
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return (
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
pending_issue_count=Count(
|
|
||||||
"issue_inbox",
|
|
||||||
filter=Q(issue_inbox__status=-2),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.select_related("workspace", "project")
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(project_id=self.kwargs.get("project_id"))
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, pk):
|
|
||||||
inbox = Inbox.objects.get(
|
|
||||||
workspace__slug=slug, project_id=project_id, pk=pk
|
|
||||||
)
|
|
||||||
# Handle default inbox delete
|
|
||||||
if inbox.is_default:
|
|
||||||
return Response(
|
|
||||||
{"error": "You cannot delete the default inbox"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
inbox.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
|
|
||||||
class InboxIssueViewSet(BaseViewSet):
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectLitePermission,
|
ProjectLitePermission,
|
||||||
]
|
]
|
||||||
@ -90,75 +45,82 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
]
|
]
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.filter_queryset(
|
inbox = Inbox.objects.filter(
|
||||||
super()
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
.get_queryset()
|
project_id=self.kwargs.get("project_id"),
|
||||||
.filter(
|
).first()
|
||||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
|
||||||
|
project = Project.objects.get(
|
||||||
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
|
pk=self.kwargs.get("project_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
if inbox is None and not project.inbox_view:
|
||||||
|
return InboxIssue.objects.none()
|
||||||
|
|
||||||
|
return (
|
||||||
|
InboxIssue.objects.filter(
|
||||||
|
Q(snoozed_till__gte=timezone.now())
|
||||||
|
| Q(snoozed_till__isnull=True),
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
project_id=self.kwargs.get("project_id"),
|
project_id=self.kwargs.get("project_id"),
|
||||||
inbox_id=self.kwargs.get("inbox_id"),
|
inbox_id=inbox.id,
|
||||||
)
|
)
|
||||||
.select_related("issue", "workspace", "project")
|
.select_related("issue", "workspace", "project")
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
)
|
)
|
||||||
|
|
||||||
def list(self, request, slug, project_id, inbox_id):
|
def get(self, request, slug, project_id, issue_id=None):
|
||||||
filters = issue_filters(request.query_params, "GET")
|
if issue_id:
|
||||||
issues = (
|
inbox_issue_queryset = self.get_queryset().get(issue_id=issue_id)
|
||||||
Issue.objects.filter(
|
inbox_issue_data = InboxIssueSerializer(
|
||||||
issue_inbox__inbox_id=inbox_id,
|
inbox_issue_queryset,
|
||||||
workspace__slug=slug,
|
fields=self.fields,
|
||||||
project_id=project_id,
|
expand=self.expand,
|
||||||
|
).data
|
||||||
|
return Response(
|
||||||
|
inbox_issue_data,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
.filter(**filters)
|
issue_queryset = self.get_queryset()
|
||||||
.annotate(bridge_id=F("issue_inbox__id"))
|
return self.paginate(
|
||||||
.select_related("workspace", "project", "state", "parent")
|
request=request,
|
||||||
.prefetch_related("assignees", "labels")
|
queryset=(issue_queryset),
|
||||||
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
|
on_results=lambda inbox_issues: InboxIssueSerializer(
|
||||||
.annotate(
|
inbox_issues,
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
many=True,
|
||||||
.order_by()
|
fields=self.fields,
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
expand=self.expand,
|
||||||
.values("count")
|
).data,
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
attachment_count=IssueAttachment.objects.filter(
|
|
||||||
issue=OuterRef("id")
|
|
||||||
)
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.prefetch_related(
|
|
||||||
Prefetch(
|
|
||||||
"issue_inbox",
|
|
||||||
queryset=InboxIssue.objects.only(
|
|
||||||
"status", "duplicate_to", "snoozed_till", "source"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
issues_data = IssueStateInboxSerializer(issues, many=True).data
|
|
||||||
return Response(
|
|
||||||
issues_data,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def post(self, request, slug, project_id):
|
||||||
def create(self, request, slug, project_id, inbox_id):
|
|
||||||
if not request.data.get("issue", {}).get("name", False):
|
if not request.data.get("issue", {}).get("name", False):
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
{"error": "Name is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
inbox = Inbox.objects.filter(
|
||||||
|
workspace__slug=slug, project_id=project_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
project = Project.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
pk=project_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Inbox view
|
||||||
|
if inbox is None and not project.inbox_view:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||||
|
},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check for valid priority
|
# Check for valid priority
|
||||||
if not request.data.get("issue", {}).get("priority", "none") in [
|
if request.data.get("issue", {}).get("priority", "none") not in [
|
||||||
"low",
|
"low",
|
||||||
"medium",
|
"medium",
|
||||||
"high",
|
"high",
|
||||||
@ -166,16 +128,18 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
"none",
|
"none",
|
||||||
]:
|
]:
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
|
{"error": "Invalid priority"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create or get state
|
# Create or get state
|
||||||
state, _ = State.objects.get_or_create(
|
state, _ = State.objects.get_or_create(
|
||||||
name="Triage",
|
name="Triage",
|
||||||
group="backlog",
|
group="triage",
|
||||||
description="Default state for managing all Inbox Issues",
|
description="Default state for managing all Inbox Issues",
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
color="#ff7700",
|
color="#ff7700",
|
||||||
|
is_triage=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
# create an issue
|
# create an issue
|
||||||
@ -198,46 +162,85 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
issue_id=str(issue.id),
|
issue_id=str(issue.id),
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=None,
|
current_instance=None,
|
||||||
epoch=int(timezone.now().timestamp())
|
epoch=int(timezone.now().timestamp()),
|
||||||
)
|
)
|
||||||
|
|
||||||
# create an inbox issue
|
# create an inbox issue
|
||||||
InboxIssue.objects.create(
|
inbox_issue = InboxIssue.objects.create(
|
||||||
inbox_id=inbox_id,
|
inbox_id=inbox.id,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
issue=issue,
|
issue=issue,
|
||||||
source=request.data.get("source", "in-app"),
|
source=request.data.get("source", "in-app"),
|
||||||
)
|
)
|
||||||
|
|
||||||
serializer = IssueStateInboxSerializer(issue)
|
serializer = InboxIssueSerializer(inbox_issue)
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
def partial_update(self, request, slug, project_id, inbox_id, pk):
|
def patch(self, request, slug, project_id, issue_id):
|
||||||
inbox_issue = InboxIssue.objects.get(
|
inbox = Inbox.objects.filter(
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
workspace__slug=slug, project_id=project_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
project = Project.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
pk=project_id,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Inbox view
|
||||||
|
if inbox is None and not project.inbox_view:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||||
|
},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get the inbox issue
|
||||||
|
inbox_issue = InboxIssue.objects.get(
|
||||||
|
issue_id=issue_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
inbox_id=inbox.id,
|
||||||
|
)
|
||||||
|
|
||||||
# Get the project member
|
# Get the project member
|
||||||
project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
|
project_member = ProjectMember.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
member=request.user,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
|
||||||
# Only project members admins and created_by users can access this endpoint
|
# Only project members admins and created_by users can access this endpoint
|
||||||
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
|
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
|
||||||
return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
|
request.user.id
|
||||||
|
):
|
||||||
|
return Response(
|
||||||
|
{"error": "You cannot edit inbox issues"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
# Get issue data
|
# Get issue data
|
||||||
issue_data = request.data.pop("issue", False)
|
issue_data = request.data.pop("issue", False)
|
||||||
|
|
||||||
if bool(issue_data):
|
if bool(issue_data):
|
||||||
issue = Issue.objects.get(
|
issue = Issue.objects.get(
|
||||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
pk=issue_id, workspace__slug=slug, project_id=project_id
|
||||||
)
|
)
|
||||||
# Only allow guests and viewers to edit name and description
|
# Only allow guests and viewers to edit name and description
|
||||||
if project_member.role <= 10:
|
if project_member.role <= 10:
|
||||||
# viewers and guests since only viewers and guests
|
# viewers and guests since only viewers and guests
|
||||||
issue_data = {
|
issue_data = {
|
||||||
"name": issue_data.get("name", issue.name),
|
"name": issue_data.get("name", issue.name),
|
||||||
"description_html": issue_data.get("description_html", issue.description_html),
|
"description_html": issue_data.get(
|
||||||
"description": issue_data.get("description", issue.description)
|
"description_html", issue.description_html
|
||||||
|
),
|
||||||
|
"description": issue_data.get(
|
||||||
|
"description", issue.description
|
||||||
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
issue_serializer = IssueCreateSerializer(
|
issue_serializer = IssueSerializer(
|
||||||
issue, data=issue_data, partial=True
|
issue, data=issue_data, partial=True
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -250,13 +253,13 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
type="issue.activity.updated",
|
type="issue.activity.updated",
|
||||||
requested_data=requested_data,
|
requested_data=requested_data,
|
||||||
actor_id=str(request.user.id),
|
actor_id=str(request.user.id),
|
||||||
issue_id=str(issue.id),
|
issue_id=str(issue_id),
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=json.dumps(
|
current_instance=json.dumps(
|
||||||
IssueSerializer(current_instance).data,
|
IssueSerializer(current_instance).data,
|
||||||
cls=DjangoJSONEncoder,
|
cls=DjangoJSONEncoder,
|
||||||
),
|
),
|
||||||
epoch=int(timezone.now().timestamp())
|
epoch=int(timezone.now().timestamp()),
|
||||||
)
|
)
|
||||||
issue_serializer.save()
|
issue_serializer.save()
|
||||||
else:
|
else:
|
||||||
@ -275,12 +278,14 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
# Update the issue state if the issue is rejected or marked as duplicate
|
# Update the issue state if the issue is rejected or marked as duplicate
|
||||||
if serializer.data["status"] in [-1, 2]:
|
if serializer.data["status"] in [-1, 2]:
|
||||||
issue = Issue.objects.get(
|
issue = Issue.objects.get(
|
||||||
pk=inbox_issue.issue_id,
|
pk=issue_id,
|
||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
)
|
)
|
||||||
state = State.objects.filter(
|
state = State.objects.filter(
|
||||||
group="cancelled", workspace__slug=slug, project_id=project_id
|
group="cancelled",
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
).first()
|
).first()
|
||||||
if state is not None:
|
if state is not None:
|
||||||
issue.state = state
|
issue.state = state
|
||||||
@ -289,271 +294,83 @@ class InboxIssueViewSet(BaseViewSet):
|
|||||||
# Update the issue state if it is accepted
|
# Update the issue state if it is accepted
|
||||||
if serializer.data["status"] in [1]:
|
if serializer.data["status"] in [1]:
|
||||||
issue = Issue.objects.get(
|
issue = Issue.objects.get(
|
||||||
pk=inbox_issue.issue_id,
|
pk=issue_id,
|
||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Update the issue state only if it is in triage state
|
# Update the issue state only if it is in triage state
|
||||||
if issue.state.name == "Triage":
|
if issue.state.is_triage:
|
||||||
# Move to default state
|
# Move to default state
|
||||||
state = State.objects.filter(
|
state = State.objects.filter(
|
||||||
workspace__slug=slug, project_id=project_id, default=True
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
default=True,
|
||||||
).first()
|
).first()
|
||||||
if state is not None:
|
if state is not None:
|
||||||
issue.state = state
|
issue.state = state
|
||||||
issue.save()
|
issue.save()
|
||||||
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(
|
||||||
|
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
|
return Response(
|
||||||
|
InboxIssueSerializer(inbox_issue).data,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
def retrieve(self, request, slug, project_id, inbox_id, pk):
|
def delete(self, request, slug, project_id, issue_id):
|
||||||
inbox_issue = InboxIssue.objects.get(
|
inbox = Inbox.objects.filter(
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
workspace__slug=slug, project_id=project_id
|
||||||
)
|
).first()
|
||||||
issue = Issue.objects.get(
|
|
||||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
|
||||||
)
|
|
||||||
serializer = IssueStateInboxSerializer(issue)
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, inbox_id, pk):
|
project = Project.objects.get(
|
||||||
inbox_issue = InboxIssue.objects.get(
|
workspace__slug=slug,
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
pk=project_id,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Inbox view
|
||||||
|
if inbox is None and not project.inbox_view:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Inbox is not enabled for this project enable it through the project's api"
|
||||||
|
},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get the inbox issue
|
||||||
|
inbox_issue = InboxIssue.objects.get(
|
||||||
|
issue_id=issue_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
inbox_id=inbox.id,
|
||||||
|
)
|
||||||
|
|
||||||
# Get the project member
|
# Get the project member
|
||||||
project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
|
project_member = ProjectMember.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
member=request.user,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
|
||||||
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
|
# Check the inbox issue created
|
||||||
return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
|
if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
|
||||||
|
request.user.id
|
||||||
|
):
|
||||||
|
return Response(
|
||||||
|
{"error": "You cannot delete inbox issue"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
# Check the issue status
|
# Check the issue status
|
||||||
if inbox_issue.status in [-2, -1, 0, 2]:
|
if inbox_issue.status in [-2, -1, 0, 2]:
|
||||||
# Delete the issue also
|
# Delete the issue also
|
||||||
Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()
|
|
||||||
|
|
||||||
inbox_issue.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
|
|
||||||
class InboxIssuePublicViewSet(BaseViewSet):
|
|
||||||
serializer_class = InboxIssueSerializer
|
|
||||||
model = InboxIssue
|
|
||||||
|
|
||||||
filterset_fields = [
|
|
||||||
"status",
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"))
|
|
||||||
if project_deploy_board is not None:
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(
|
|
||||||
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
inbox_id=self.kwargs.get("inbox_id"),
|
|
||||||
)
|
|
||||||
.select_related("issue", "workspace", "project")
|
|
||||||
)
|
|
||||||
return InboxIssue.objects.none()
|
|
||||||
|
|
||||||
def list(self, request, slug, project_id, inbox_id):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
filters = issue_filters(request.query_params, "GET")
|
|
||||||
issues = (
|
|
||||||
Issue.objects.filter(
|
Issue.objects.filter(
|
||||||
issue_inbox__inbox_id=inbox_id,
|
workspace__slug=slug, project_id=project_id, pk=issue_id
|
||||||
workspace__slug=slug,
|
).delete()
|
||||||
project_id=project_id,
|
|
||||||
)
|
|
||||||
.filter(**filters)
|
|
||||||
.annotate(bridge_id=F("issue_inbox__id"))
|
|
||||||
.select_related("workspace", "project", "state", "parent")
|
|
||||||
.prefetch_related("assignees", "labels")
|
|
||||||
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
|
|
||||||
.annotate(
|
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
attachment_count=IssueAttachment.objects.filter(
|
|
||||||
issue=OuterRef("id")
|
|
||||||
)
|
|
||||||
.order_by()
|
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
|
||||||
.values("count")
|
|
||||||
)
|
|
||||||
.prefetch_related(
|
|
||||||
Prefetch(
|
|
||||||
"issue_inbox",
|
|
||||||
queryset=InboxIssue.objects.only(
|
|
||||||
"status", "duplicate_to", "snoozed_till", "source"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
issues_data = IssueStateInboxSerializer(issues, many=True).data
|
|
||||||
return Response(
|
|
||||||
issues_data,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug, project_id, inbox_id):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
if not request.data.get("issue", {}).get("name", False):
|
|
||||||
return Response(
|
|
||||||
{"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check for valid priority
|
|
||||||
if not request.data.get("issue", {}).get("priority", "none") in [
|
|
||||||
"low",
|
|
||||||
"medium",
|
|
||||||
"high",
|
|
||||||
"urgent",
|
|
||||||
"none",
|
|
||||||
]:
|
|
||||||
return Response(
|
|
||||||
{"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create or get state
|
|
||||||
state, _ = State.objects.get_or_create(
|
|
||||||
name="Triage",
|
|
||||||
group="backlog",
|
|
||||||
description="Default state for managing all Inbox Issues",
|
|
||||||
project_id=project_id,
|
|
||||||
color="#ff7700",
|
|
||||||
)
|
|
||||||
|
|
||||||
# create an issue
|
|
||||||
issue = Issue.objects.create(
|
|
||||||
name=request.data.get("issue", {}).get("name"),
|
|
||||||
description=request.data.get("issue", {}).get("description", {}),
|
|
||||||
description_html=request.data.get("issue", {}).get(
|
|
||||||
"description_html", "<p></p>"
|
|
||||||
),
|
|
||||||
priority=request.data.get("issue", {}).get("priority", "low"),
|
|
||||||
project_id=project_id,
|
|
||||||
state=state,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create an Issue Activity
|
|
||||||
issue_activity.delay(
|
|
||||||
type="issue.activity.created",
|
|
||||||
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
|
|
||||||
actor_id=str(request.user.id),
|
|
||||||
issue_id=str(issue.id),
|
|
||||||
project_id=str(project_id),
|
|
||||||
current_instance=None,
|
|
||||||
epoch=int(timezone.now().timestamp())
|
|
||||||
)
|
|
||||||
# create an inbox issue
|
|
||||||
InboxIssue.objects.create(
|
|
||||||
inbox_id=inbox_id,
|
|
||||||
project_id=project_id,
|
|
||||||
issue=issue,
|
|
||||||
source=request.data.get("source", "in-app"),
|
|
||||||
)
|
|
||||||
|
|
||||||
serializer = IssueStateInboxSerializer(issue)
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
def partial_update(self, request, slug, project_id, inbox_id, pk):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue = InboxIssue.objects.get(
|
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
|
||||||
)
|
|
||||||
# Get the project member
|
|
||||||
if str(inbox_issue.created_by_id) != str(request.user.id):
|
|
||||||
return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
# Get issue data
|
|
||||||
issue_data = request.data.pop("issue", False)
|
|
||||||
|
|
||||||
|
|
||||||
issue = Issue.objects.get(
|
|
||||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
|
||||||
)
|
|
||||||
# viewers and guests since only viewers and guests
|
|
||||||
issue_data = {
|
|
||||||
"name": issue_data.get("name", issue.name),
|
|
||||||
"description_html": issue_data.get("description_html", issue.description_html),
|
|
||||||
"description": issue_data.get("description", issue.description)
|
|
||||||
}
|
|
||||||
|
|
||||||
issue_serializer = IssueCreateSerializer(
|
|
||||||
issue, data=issue_data, partial=True
|
|
||||||
)
|
|
||||||
|
|
||||||
if issue_serializer.is_valid():
|
|
||||||
current_instance = issue
|
|
||||||
# Log all the updates
|
|
||||||
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
|
|
||||||
if issue is not None:
|
|
||||||
issue_activity.delay(
|
|
||||||
type="issue.activity.updated",
|
|
||||||
requested_data=requested_data,
|
|
||||||
actor_id=str(request.user.id),
|
|
||||||
issue_id=str(issue.id),
|
|
||||||
project_id=str(project_id),
|
|
||||||
current_instance=json.dumps(
|
|
||||||
IssueSerializer(current_instance).data,
|
|
||||||
cls=DjangoJSONEncoder,
|
|
||||||
),
|
|
||||||
epoch=int(timezone.now().timestamp())
|
|
||||||
)
|
|
||||||
issue_serializer.save()
|
|
||||||
return Response(issue_serializer.data, status=status.HTTP_200_OK)
|
|
||||||
return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def retrieve(self, request, slug, project_id, inbox_id, pk):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue = InboxIssue.objects.get(
|
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
|
||||||
)
|
|
||||||
issue = Issue.objects.get(
|
|
||||||
pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
|
|
||||||
)
|
|
||||||
serializer = IssueStateInboxSerializer(issue)
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, inbox_id, pk):
|
|
||||||
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
|
|
||||||
if project_deploy_board.inbox is None:
|
|
||||||
return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue = InboxIssue.objects.get(
|
|
||||||
pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
|
|
||||||
)
|
|
||||||
|
|
||||||
if str(inbox_issue.created_by_id) != str(request.user.id):
|
|
||||||
return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
inbox_issue.delete()
|
inbox_issue.delete()
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
@ -1,9 +0,0 @@
|
|||||||
from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
|
|
||||||
from .github import (
|
|
||||||
GithubRepositorySyncViewSet,
|
|
||||||
GithubIssueSyncViewSet,
|
|
||||||
BulkCreateGithubIssueSyncEndpoint,
|
|
||||||
GithubCommentSyncViewSet,
|
|
||||||
GithubRepositoriesEndpoint,
|
|
||||||
)
|
|
||||||
from .slack import SlackProjectSyncViewSet
|
|
@ -1,171 +0,0 @@
|
|||||||
# Python improts
|
|
||||||
import uuid
|
|
||||||
import requests
|
|
||||||
# Django imports
|
|
||||||
from django.contrib.auth.hashers import make_password
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.api.views import BaseViewSet
|
|
||||||
from plane.db.models import (
|
|
||||||
Integration,
|
|
||||||
WorkspaceIntegration,
|
|
||||||
Workspace,
|
|
||||||
User,
|
|
||||||
WorkspaceMember,
|
|
||||||
APIToken,
|
|
||||||
)
|
|
||||||
from plane.api.serializers import IntegrationSerializer, WorkspaceIntegrationSerializer
|
|
||||||
from plane.utils.integrations.github import (
|
|
||||||
get_github_metadata,
|
|
||||||
delete_github_installation,
|
|
||||||
)
|
|
||||||
from plane.api.permissions import WorkSpaceAdminPermission
|
|
||||||
from plane.utils.integrations.slack import slack_oauth
|
|
||||||
|
|
||||||
class IntegrationViewSet(BaseViewSet):
    """CRUD endpoints for the globally available Integration records.

    Verified integrations are treated as read-only: they can be neither
    updated nor deleted through this viewset.
    """

    serializer_class = IntegrationSerializer
    model = Integration

    def create(self, request):
        """Create a new integration from the request payload."""
        serializer = IntegrationSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def partial_update(self, request, pk):
        """Partially update an integration; rejected for verified ones."""
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            return Response(
                {"error": "Verified integrations cannot be updated"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        serializer = IntegrationSerializer(
            integration, data=request.data, partial=True
        )

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, pk):
        """Delete an integration; rejected for verified ones."""
        integration = Integration.objects.get(pk=pk)
        if integration.verified:
            # BUG FIX: this is the delete path but the message previously
            # said "cannot be updated" (copy/paste from partial_update).
            return Response(
                {"error": "Verified integrations cannot be deleted"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceIntegrationViewSet(BaseViewSet):
    """Install and uninstall third-party integrations for a workspace.

    Installing a provider ("github" or "slack") creates a dedicated bot
    user, an API token for that bot, the WorkspaceIntegration record
    itself, and adds the bot to the workspace as an admin member.
    """

    serializer_class = WorkspaceIntegrationSerializer
    model = WorkspaceIntegration

    permission_classes = [
        WorkSpaceAdminPermission,
    ]

    def get_queryset(self):
        # Scope to the workspace from the URL; select_related avoids an
        # extra query per row when the serializer reads `integration`.
        return (
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .select_related("integration")
        )

    def create(self, request, slug, provider):
        """Install `provider` into the workspace identified by `slug`."""
        workspace = Workspace.objects.get(slug=slug)
        integration = Integration.objects.get(provider=provider)
        config = {}
        # BUG FIX: `metadata` was only assigned inside the github/slack
        # branches but is referenced unconditionally below, so any other
        # provider raised NameError. Default it explicitly.
        metadata = None
        if provider == "github":
            installation_id = request.data.get("installation_id", None)
            if not installation_id:
                return Response(
                    {"error": "Installation ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            metadata = get_github_metadata(installation_id)
            config = {"installation_id": installation_id}

        if provider == "slack":
            code = request.data.get("code", False)

            if not code:
                return Response({"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST)

            slack_response = slack_oauth(code=code)

            metadata = slack_response
            access_token = metadata.get("access_token", False)
            team_id = metadata.get("team", {}).get("id", False)
            if not metadata or not access_token or not team_id:
                return Response(
                    {"error": "Slack could not be installed. Please try again later"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            config = {"team_id": team_id, "access_token": access_token}

        # Create a bot user to act on behalf of the integration
        bot_user = User.objects.create(
            email=f"{uuid.uuid4().hex}@plane.so",
            username=uuid.uuid4().hex,
            password=make_password(uuid.uuid4().hex),
            is_password_autoset=True,
            is_bot=True,
            first_name=integration.title,
            avatar=integration.avatar_url
            if integration.avatar_url is not None
            else "",
        )

        # Create an API Token for the bot user
        api_token = APIToken.objects.create(
            user=bot_user,
            user_type=1,  # bot user
            workspace=workspace,
        )

        workspace_integration = WorkspaceIntegration.objects.create(
            workspace=workspace,
            integration=integration,
            actor=bot_user,
            api_token=api_token,
            metadata=metadata,
            config=config,
        )

        # Add bot user as a member of workspace (role 20 = admin)
        _ = WorkspaceMember.objects.create(
            workspace=workspace_integration.workspace,
            member=bot_user,
            role=20,
        )
        return Response(
            WorkspaceIntegrationSerializer(workspace_integration).data,
            status=status.HTTP_201_CREATED,
        )

    def destroy(self, request, slug, pk):
        """Uninstall the integration; also revokes a github app installation."""
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=pk, workspace__slug=slug
        )

        if workspace_integration.integration.provider == "github":
            installation_id = workspace_integration.config.get(
                "installation_id", False
            )
            if installation_id:
                delete_github_installation(installation_id=installation_id)

        workspace_integration.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
|
|
@ -1,200 +0,0 @@
|
|||||||
# Third party imports
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.api.views import BaseViewSet, BaseAPIView
|
|
||||||
from plane.db.models import (
|
|
||||||
GithubIssueSync,
|
|
||||||
GithubRepositorySync,
|
|
||||||
GithubRepository,
|
|
||||||
WorkspaceIntegration,
|
|
||||||
ProjectMember,
|
|
||||||
Label,
|
|
||||||
GithubCommentSync,
|
|
||||||
Project,
|
|
||||||
)
|
|
||||||
from plane.api.serializers import (
|
|
||||||
GithubIssueSyncSerializer,
|
|
||||||
GithubRepositorySyncSerializer,
|
|
||||||
GithubCommentSyncSerializer,
|
|
||||||
)
|
|
||||||
from plane.utils.integrations.github import get_github_repos
|
|
||||||
from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
|
|
||||||
|
|
||||||
|
|
||||||
class GithubRepositoriesEndpoint(BaseAPIView):
    """Paginated list of repositories visible to a github workspace integration."""

    permission_classes = [
        ProjectBasePermission,
    ]

    def get(self, request, slug, workspace_integration_id):
        integration_record = WorkspaceIntegration.objects.get(
            workspace__slug=slug, pk=workspace_integration_id
        )

        # Only github integrations expose repositories.
        if integration_record.integration.provider != "github":
            return Response(
                {"error": "Not a github integration"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        page = request.GET.get("page", 1)
        meta = integration_record.metadata
        tokens_url = meta["access_tokens_url"]
        repos_url = f"{meta['repositories_url']}?per_page=100&page={page}"
        return Response(
            get_github_repos(tokens_url, repos_url),
            status=status.HTTP_200_OK,
        )
|
|
||||||
|
|
||||||
|
|
||||||
class GithubRepositorySyncViewSet(BaseViewSet):
    """Link a GitHub repository to a project and create the sync records."""

    permission_classes = [
        ProjectBasePermission,
    ]

    serializer_class = GithubRepositorySyncSerializer
    model = GithubRepositorySync

    def perform_create(self, serializer):
        # Bind the sync record to the project from the URL.
        serializer.save(project_id=self.kwargs.get("project_id"))

    def get_queryset(self):
        return (
            super()
            .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
        )

    def create(self, request, slug, project_id, workspace_integration_id):
        """Replace the project's repository sync with one for the given repo.

        Requires `name`, `url`, `repository_id` and `owner` in the payload.
        Existing repo/sync rows for the project are deleted first, so a
        project has at most one synced repository.
        """
        name = request.data.get("name", False)
        url = request.data.get("url", False)
        config = request.data.get("config", {})
        repository_id = request.data.get("repository_id", False)
        owner = request.data.get("owner", False)

        if not name or not url or not repository_id or not owner:
            return Response(
                {"error": "Name, url, repository_id and owner are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the workspace integration
        workspace_integration = WorkspaceIntegration.objects.get(
            pk=workspace_integration_id
        )

        # Delete the old repository object
        GithubRepositorySync.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()
        GithubRepository.objects.filter(
            project_id=project_id, workspace__slug=slug
        ).delete()

        # Create repository
        repo = GithubRepository.objects.create(
            name=name,
            url=url,
            config=config,
            repository_id=repository_id,
            owner=owner,
            project_id=project_id,
        )

        # Get or create the "GitHub" label for the project.
        # IMPROVEMENT: get_or_create replaces the previous
        # filter(...).first() + create() pair — one fewer query and no
        # duplicate-label race window between the lookup and the insert.
        label, _ = Label.objects.get_or_create(
            name="GitHub",
            project_id=project_id,
            defaults={
                "description": "Label to sync Plane issues with GitHub issues",
                "color": "#003773",
            },
        )

        # Create repo sync
        repo_sync = GithubRepositorySync.objects.create(
            repository=repo,
            workspace_integration=workspace_integration,
            actor=workspace_integration.actor,
            credentials=request.data.get("credentials", {}),
            project_id=project_id,
            label=label,
        )

        # Add bot as a member in the project (role 20 = admin)
        _ = ProjectMember.objects.get_or_create(
            member=workspace_integration.actor, role=20, project_id=project_id
        )

        # Return Response
        return Response(
            GithubRepositorySyncSerializer(repo_sync).data,
            status=status.HTTP_201_CREATED,
        )
|
|
||||||
|
|
||||||
|
|
||||||
class GithubIssueSyncViewSet(BaseViewSet):
    """CRUD for issue-level github sync records under a repository sync."""

    permission_classes = [
        ProjectEntityPermission,
    ]

    serializer_class = GithubIssueSyncSerializer
    model = GithubIssueSync

    def perform_create(self, serializer):
        # Attach the project and repository sync ids taken from the URL.
        url_kwargs = self.kwargs
        serializer.save(
            project_id=url_kwargs.get("project_id"),
            repository_sync_id=url_kwargs.get("repo_sync_id"),
        )
|
|
||||||
|
|
||||||
|
|
||||||
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
    """Bulk-create github issue sync rows for a repository sync."""

    def post(self, request, slug, project_id, repo_sync_id):
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        payload = request.data.get("github_issue_syncs", [])
        rows = [
            GithubIssueSync(
                issue_id=item.get("issue"),
                repo_issue_id=item.get("repo_issue_id"),
                issue_url=item.get("issue_url"),
                github_issue_id=item.get("github_issue_id"),
                repository_sync_id=repo_sync_id,
                project_id=project_id,
                workspace_id=project.workspace_id,
                created_by=request.user,
                updated_by=request.user,
            )
            for item in payload
        ]
        # ignore_conflicts keeps the call idempotent for already-synced rows.
        created = GithubIssueSync.objects.bulk_create(
            rows,
            batch_size=100,
            ignore_conflicts=True,
        )

        return Response(
            GithubIssueSyncSerializer(created, many=True).data,
            status=status.HTTP_201_CREATED,
        )
|
|
||||||
|
|
||||||
|
|
||||||
class GithubCommentSyncViewSet(BaseViewSet):
    """CRUD for comment-level github sync records under an issue sync."""

    permission_classes = [
        ProjectEntityPermission,
    ]

    serializer_class = GithubCommentSyncSerializer
    model = GithubCommentSync

    def perform_create(self, serializer):
        # Attach URL-derived foreign keys before saving.
        serializer.save(
            issue_sync_id=self.kwargs.get("issue_sync_id"),
            project_id=self.kwargs.get("project_id"),
        )
|
|
@ -1,79 +0,0 @@
|
|||||||
# Django import
|
|
||||||
from django.db import IntegrityError
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.api.views import BaseViewSet, BaseAPIView
|
|
||||||
from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember
|
|
||||||
from plane.api.serializers import SlackProjectSyncSerializer
|
|
||||||
from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
|
|
||||||
from plane.utils.integrations.slack import slack_oauth
|
|
||||||
|
|
||||||
|
|
||||||
class SlackProjectSyncViewSet(BaseViewSet):
    """Connect a Slack workspace to a project via the Slack OAuth flow."""

    permission_classes = [
        ProjectBasePermission,
    ]
    serializer_class = SlackProjectSyncSerializer
    model = SlackProjectSync

    def get_queryset(self):
        # Scope to the workspace/project from the URL and restrict to
        # projects the requesting user is a member of.
        return (
            super()
            .get_queryset()
            .filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
            )
            .filter(project__project_projectmember__member=self.request.user)
        )

    def create(self, request, slug, project_id, workspace_integration_id):
        """Exchange the OAuth `code` and persist the Slack project sync."""
        try:
            code = request.data.get("code", False)

            if not code:
                return Response(
                    {"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST
                )

            slack_response = slack_oauth(code=code)

            # BUG FIX: the workspace integration was previously fetched
            # twice with identical filters; a single query suffices.
            workspace_integration = WorkspaceIntegration.objects.get(
                pk=workspace_integration_id, workspace__slug=slug
            )
            slack_project_sync = SlackProjectSync.objects.create(
                access_token=slack_response.get("access_token"),
                scopes=slack_response.get("scope"),
                bot_user_id=slack_response.get("bot_user_id"),
                webhook_url=slack_response.get("incoming_webhook", {}).get("url"),
                data=slack_response,
                team_id=slack_response.get("team", {}).get("id"),
                team_name=slack_response.get("team", {}).get("name"),
                workspace_integration=workspace_integration,
                project_id=project_id,
            )
            # Ensure the integration bot is a member of the project.
            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor, role=20, project_id=project_id
            )
            serializer = SlackProjectSyncSerializer(slack_project_sync)
            return Response(serializer.data, status=status.HTTP_200_OK)
        except IntegrityError as e:
            # A unique constraint violation means Slack is already set up
            # for this project; anything else is unexpected and reported.
            if "already exists" in str(e):
                return Response(
                    {"error": "Slack is already installed for the project"},
                    status=status.HTTP_410_GONE,
                )
            capture_exception(e)
            return Response(
                {"error": "Slack could not be installed. Please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )
|
|
File diff suppressed because it is too large
Load Diff
@ -1,74 +1,54 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
import json
|
import json
|
||||||
|
|
||||||
# Django Imports
|
# Django imports
|
||||||
from django.utils import timezone
|
|
||||||
from django.db import IntegrityError
|
|
||||||
from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
|
|
||||||
from django.core import serializers
|
from django.core import serializers
|
||||||
from django.utils.decorators import method_decorator
|
from django.db.models import Count, F, Func, OuterRef, Prefetch, Q
|
||||||
from django.views.decorators.gzip import gzip_page
|
from django.utils import timezone
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from sentry_sdk import capture_exception
|
from rest_framework.response import Response
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from . import BaseViewSet
|
|
||||||
from plane.api.serializers import (
|
from plane.api.serializers import (
|
||||||
ModuleWriteSerializer,
|
IssueSerializer,
|
||||||
ModuleSerializer,
|
|
||||||
ModuleIssueSerializer,
|
ModuleIssueSerializer,
|
||||||
ModuleLinkSerializer,
|
ModuleSerializer,
|
||||||
ModuleFavoriteSerializer,
|
|
||||||
IssueStateSerializer,
|
|
||||||
)
|
)
|
||||||
from plane.api.permissions import ProjectEntityPermission
|
from plane.app.permissions import ProjectEntityPermission
|
||||||
|
from plane.bgtasks.issue_activites_task import issue_activity
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
|
Issue,
|
||||||
|
IssueAttachment,
|
||||||
|
IssueLink,
|
||||||
Module,
|
Module,
|
||||||
ModuleIssue,
|
ModuleIssue,
|
||||||
Project,
|
|
||||||
Issue,
|
|
||||||
ModuleLink,
|
ModuleLink,
|
||||||
ModuleFavorite,
|
Project,
|
||||||
IssueLink,
|
|
||||||
IssueAttachment,
|
|
||||||
)
|
)
|
||||||
from plane.bgtasks.issue_activites_task import issue_activity
|
|
||||||
from plane.utils.grouper import group_results
|
from .base import BaseAPIView, WebhookMixin
|
||||||
from plane.utils.issue_filters import issue_filters
|
|
||||||
from plane.utils.analytics_plot import burndown_plot
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleViewSet(BaseViewSet):
|
class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
|
"""
|
||||||
|
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||||
|
`update` and `destroy` actions related to module.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
model = Module
|
model = Module
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectEntityPermission,
|
ProjectEntityPermission,
|
||||||
]
|
]
|
||||||
|
serializer_class = ModuleSerializer
|
||||||
def get_serializer_class(self):
|
webhook_event = "module"
|
||||||
return (
|
|
||||||
ModuleWriteSerializer
|
|
||||||
if self.action in ["create", "update", "partial_update"]
|
|
||||||
else ModuleSerializer
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
order_by = self.request.GET.get("order_by", "sort_order")
|
|
||||||
|
|
||||||
subquery = ModuleFavorite.objects.filter(
|
|
||||||
user=self.request.user,
|
|
||||||
module_id=OuterRef("pk"),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
)
|
|
||||||
return (
|
return (
|
||||||
super()
|
Module.objects.filter(project_id=self.kwargs.get("project_id"))
|
||||||
.get_queryset()
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.annotate(is_favorite=Exists(subquery))
|
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
.select_related("lead")
|
.select_related("lead")
|
||||||
@ -76,7 +56,9 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
.prefetch_related(
|
.prefetch_related(
|
||||||
Prefetch(
|
Prefetch(
|
||||||
"link_module",
|
"link_module",
|
||||||
queryset=ModuleLink.objects.select_related("module", "created_by"),
|
queryset=ModuleLink.objects.select_related(
|
||||||
|
"module", "created_by"
|
||||||
|
),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -86,6 +68,7 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -96,6 +79,7 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -106,6 +90,7 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -116,6 +101,7 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -126,6 +112,7 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -136,135 +123,123 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.order_by(order_by, "name")
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
)
|
)
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
def post(self, request, slug, project_id):
|
||||||
project = Project.objects.get(workspace__slug=slug, pk=project_id)
|
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||||
serializer = ModuleWriteSerializer(
|
serializer = ModuleSerializer(
|
||||||
data=request.data, context={"project": project}
|
data=request.data,
|
||||||
|
context={
|
||||||
|
"project_id": project_id,
|
||||||
|
"workspace_id": project.workspace_id,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
if serializer.is_valid():
|
if serializer.is_valid():
|
||||||
|
if (
|
||||||
|
request.data.get("external_id")
|
||||||
|
and request.data.get("external_source")
|
||||||
|
and Module.objects.filter(
|
||||||
|
project_id=project_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
external_source=request.data.get("external_source"),
|
||||||
|
external_id=request.data.get("external_id"),
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
module = Module.objects.filter(
|
||||||
|
project_id=project_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
external_source=request.data.get("external_source"),
|
||||||
|
external_id=request.data.get("external_id"),
|
||||||
|
).first()
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Module with the same external id and external source already exists",
|
||||||
|
"id": str(module.id),
|
||||||
|
},
|
||||||
|
status=status.HTTP_409_CONFLICT,
|
||||||
|
)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
|
|
||||||
module = Module.objects.get(pk=serializer.data["id"])
|
module = Module.objects.get(pk=serializer.data["id"])
|
||||||
serializer = ModuleSerializer(module)
|
serializer = ModuleSerializer(module)
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
def retrieve(self, request, slug, project_id, pk):
|
def patch(self, request, slug, project_id, pk):
|
||||||
queryset = self.get_queryset().get(pk=pk)
|
module = Module.objects.get(
|
||||||
|
pk=pk, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
if module.archived_at:
|
||||||
|
return Response(
|
||||||
|
{"error": "Archived module cannot be edited"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
serializer = ModuleSerializer(
|
||||||
|
module,
|
||||||
|
data=request.data,
|
||||||
|
context={"project_id": project_id},
|
||||||
|
partial=True,
|
||||||
|
)
|
||||||
|
if serializer.is_valid():
|
||||||
|
if (
|
||||||
|
request.data.get("external_id")
|
||||||
|
and (module.external_id != request.data.get("external_id"))
|
||||||
|
and Module.objects.filter(
|
||||||
|
project_id=project_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
external_source=request.data.get(
|
||||||
|
"external_source", module.external_source
|
||||||
|
),
|
||||||
|
external_id=request.data.get("external_id"),
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Module with the same external id and external source already exists",
|
||||||
|
"id": str(module.id),
|
||||||
|
},
|
||||||
|
status=status.HTTP_409_CONFLICT,
|
||||||
|
)
|
||||||
|
serializer.save()
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
assignee_distribution = (
|
def get(self, request, slug, project_id, pk=None):
|
||||||
Issue.objects.filter(
|
if pk:
|
||||||
issue_module__module_id=pk,
|
queryset = (
|
||||||
workspace__slug=slug,
|
self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
|
||||||
project_id=project_id,
|
|
||||||
)
|
)
|
||||||
.annotate(first_name=F("assignees__first_name"))
|
data = ModuleSerializer(
|
||||||
.annotate(last_name=F("assignees__last_name"))
|
queryset,
|
||||||
.annotate(assignee_id=F("assignees__id"))
|
fields=self.fields,
|
||||||
.annotate(display_name=F("assignees__display_name"))
|
expand=self.expand,
|
||||||
.annotate(avatar=F("assignees__avatar"))
|
).data
|
||||||
.values("first_name", "last_name", "assignee_id", "avatar", "display_name")
|
return Response(
|
||||||
.annotate(
|
data,
|
||||||
total_issues=Count(
|
status=status.HTTP_200_OK,
|
||||||
"assignee_id",
|
|
||||||
filter=Q(
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
.annotate(
|
return self.paginate(
|
||||||
completed_issues=Count(
|
request=request,
|
||||||
"assignee_id",
|
queryset=(self.get_queryset().filter(archived_at__isnull=True)),
|
||||||
filter=Q(
|
on_results=lambda modules: ModuleSerializer(
|
||||||
completed_at__isnull=False,
|
modules,
|
||||||
archived_at__isnull=True,
|
many=True,
|
||||||
is_draft=False,
|
fields=self.fields,
|
||||||
),
|
expand=self.expand,
|
||||||
)
|
).data,
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
pending_issues=Count(
|
|
||||||
"assignee_id",
|
|
||||||
filter=Q(
|
|
||||||
completed_at__isnull=True,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.order_by("first_name", "last_name")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
label_distribution = (
|
def delete(self, request, slug, project_id, pk):
|
||||||
Issue.objects.filter(
|
module = Module.objects.get(
|
||||||
issue_module__module_id=pk,
|
workspace__slug=slug, project_id=project_id, pk=pk
|
||||||
workspace__slug=slug,
|
|
||||||
project_id=project_id,
|
|
||||||
)
|
|
||||||
.annotate(label_name=F("labels__name"))
|
|
||||||
.annotate(color=F("labels__color"))
|
|
||||||
.annotate(label_id=F("labels__id"))
|
|
||||||
.values("label_name", "color", "label_id")
|
|
||||||
.annotate(
|
|
||||||
total_issues=Count(
|
|
||||||
"label_id",
|
|
||||||
filter=Q(
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
completed_issues=Count(
|
|
||||||
"label_id",
|
|
||||||
filter=Q(
|
|
||||||
completed_at__isnull=False,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
pending_issues=Count(
|
|
||||||
"label_id",
|
|
||||||
filter=Q(
|
|
||||||
completed_at__isnull=True,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
is_draft=False,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.order_by("label_name")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
data = ModuleSerializer(queryset).data
|
|
||||||
data["distribution"] = {
|
|
||||||
"assignees": assignee_distribution,
|
|
||||||
"labels": label_distribution,
|
|
||||||
"completion_chart": {},
|
|
||||||
}
|
|
||||||
|
|
||||||
if queryset.start_date and queryset.target_date:
|
|
||||||
data["distribution"]["completion_chart"] = burndown_plot(
|
|
||||||
queryset=queryset, slug=slug, project_id=project_id, module_id=pk
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
data,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, pk):
|
|
||||||
module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
|
||||||
module_issues = list(
|
module_issues = list(
|
||||||
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
|
ModuleIssue.objects.filter(module_id=pk).values_list(
|
||||||
|
"issue", flat=True
|
||||||
|
)
|
||||||
)
|
)
|
||||||
issue_activity.delay(
|
issue_activity.delay(
|
||||||
type="module.activity.deleted",
|
type="module.activity.deleted",
|
||||||
@ -276,7 +251,7 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
}
|
}
|
||||||
),
|
),
|
||||||
actor_id=str(request.user.id),
|
actor_id=str(request.user.id),
|
||||||
issue_id=str(pk),
|
issue_id=None,
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=None,
|
current_instance=None,
|
||||||
epoch=int(timezone.now().timestamp()),
|
epoch=int(timezone.now().timestamp()),
|
||||||
@ -285,25 +260,28 @@ class ModuleViewSet(BaseViewSet):
|
|||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class ModuleIssueViewSet(BaseViewSet):
|
class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
|
"""
|
||||||
|
This viewset automatically provides `list`, `create`, `retrieve`,
|
||||||
|
`update` and `destroy` actions related to module issues.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
serializer_class = ModuleIssueSerializer
|
serializer_class = ModuleIssueSerializer
|
||||||
model = ModuleIssue
|
model = ModuleIssue
|
||||||
|
webhook_event = "module_issue"
|
||||||
filterset_fields = [
|
bulk = True
|
||||||
"issue__labels__id",
|
|
||||||
"issue__assignees__id",
|
|
||||||
]
|
|
||||||
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectEntityPermission,
|
ProjectEntityPermission,
|
||||||
]
|
]
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.filter_queryset(
|
return (
|
||||||
super()
|
ModuleIssue.objects.annotate(
|
||||||
.get_queryset()
|
sub_issues_count=Issue.issue_objects.filter(
|
||||||
.annotate(
|
parent=OuterRef("issue")
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
|
)
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
.values("count")
|
.values("count")
|
||||||
@ -311,26 +289,29 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
.filter(project_id=self.kwargs.get("project_id"))
|
||||||
.filter(module_id=self.kwargs.get("module_id"))
|
.filter(module_id=self.kwargs.get("module_id"))
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
.filter(
|
||||||
|
project__project_projectmember__member=self.request.user,
|
||||||
|
project__project_projectmember__is_active=True,
|
||||||
|
)
|
||||||
|
.filter(project__archived_at__isnull=True)
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
.select_related("module")
|
.select_related("module")
|
||||||
.select_related("issue", "issue__state", "issue__project")
|
.select_related("issue", "issue__state", "issue__project")
|
||||||
.prefetch_related("issue__assignees", "issue__labels")
|
.prefetch_related("issue__assignees", "issue__labels")
|
||||||
.prefetch_related("module__members")
|
.prefetch_related("module__members")
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
.distinct()
|
.distinct()
|
||||||
)
|
)
|
||||||
|
|
||||||
@method_decorator(gzip_page)
|
def get(self, request, slug, project_id, module_id):
|
||||||
def list(self, request, slug, project_id, module_id):
|
|
||||||
order_by = request.GET.get("order_by", "created_at")
|
order_by = request.GET.get("order_by", "created_at")
|
||||||
group_by = request.GET.get("group_by", False)
|
|
||||||
sub_group_by = request.GET.get("sub_group_by", False)
|
|
||||||
filters = issue_filters(request.query_params, "GET")
|
|
||||||
issues = (
|
issues = (
|
||||||
Issue.issue_objects.filter(issue_module__module_id=module_id)
|
Issue.issue_objects.filter(issue_module__module_id=module_id)
|
||||||
.annotate(
|
.annotate(
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
sub_issues_count=Issue.issue_objects.filter(
|
||||||
|
parent=OuterRef("id")
|
||||||
|
)
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
.values("count")
|
.values("count")
|
||||||
@ -345,7 +326,6 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
.prefetch_related("assignees")
|
.prefetch_related("assignees")
|
||||||
.prefetch_related("labels")
|
.prefetch_related("labels")
|
||||||
.order_by(order_by)
|
.order_by(order_by)
|
||||||
.filter(**filters)
|
|
||||||
.annotate(
|
.annotate(
|
||||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||||
.order_by()
|
.order_by()
|
||||||
@ -353,41 +333,40 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
.values("count")
|
.values("count")
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
|
attachment_count=IssueAttachment.objects.filter(
|
||||||
|
issue=OuterRef("id")
|
||||||
|
)
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
.values("count")
|
.values("count")
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
issues_data = IssueStateSerializer(issues, many=True).data
|
return self.paginate(
|
||||||
|
request=request,
|
||||||
if sub_group_by and sub_group_by == group_by:
|
queryset=(issues),
|
||||||
return Response(
|
on_results=lambda issues: IssueSerializer(
|
||||||
{"error": "Group by and sub group by cannot be same"},
|
issues,
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
many=True,
|
||||||
)
|
fields=self.fields,
|
||||||
|
expand=self.expand,
|
||||||
if group_by:
|
).data,
|
||||||
grouped_results = group_results(issues_data, group_by, sub_group_by)
|
|
||||||
return Response(
|
|
||||||
grouped_results,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
issues_data, status=status.HTTP_200_OK
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def create(self, request, slug, project_id, module_id):
|
def post(self, request, slug, project_id, module_id):
|
||||||
issues = request.data.get("issues", [])
|
issues = request.data.get("issues", [])
|
||||||
if not len(issues):
|
if not len(issues):
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
|
{"error": "Issues are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
module = Module.objects.get(
|
module = Module.objects.get(
|
||||||
workspace__slug=slug, project_id=project_id, pk=module_id
|
workspace__slug=slug, project_id=project_id, pk=module_id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
issues = Issue.objects.filter(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk__in=issues
|
||||||
|
).values_list("id", flat=True)
|
||||||
|
|
||||||
module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
|
module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
|
||||||
|
|
||||||
update_module_issue_activity = []
|
update_module_issue_activity = []
|
||||||
@ -439,7 +418,7 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
# Capture Issue Activity
|
# Capture Issue Activity
|
||||||
issue_activity.delay(
|
issue_activity.delay(
|
||||||
type="module.activity.created",
|
type="module.activity.created",
|
||||||
requested_data=json.dumps({"modules_list": issues}),
|
requested_data=json.dumps({"modules_list": str(issues)}),
|
||||||
actor_id=str(self.request.user.id),
|
actor_id=str(self.request.user.id),
|
||||||
issue_id=None,
|
issue_id=None,
|
||||||
project_id=str(self.kwargs.get("project_id", None)),
|
project_id=str(self.kwargs.get("project_id", None)),
|
||||||
@ -459,9 +438,12 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
status=status.HTTP_200_OK,
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, module_id, pk):
|
def delete(self, request, slug, project_id, module_id, issue_id):
|
||||||
module_issue = ModuleIssue.objects.get(
|
module_issue = ModuleIssue.objects.get(
|
||||||
workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
module_id=module_id,
|
||||||
|
issue_id=issue_id,
|
||||||
)
|
)
|
||||||
module_issue.delete()
|
module_issue.delete()
|
||||||
issue_activity.delay(
|
issue_activity.delay(
|
||||||
@ -473,7 +455,7 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
}
|
}
|
||||||
),
|
),
|
||||||
actor_id=str(request.user.id),
|
actor_id=str(request.user.id),
|
||||||
issue_id=str(pk),
|
issue_id=str(issue_id),
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=None,
|
current_instance=None,
|
||||||
epoch=int(timezone.now().timestamp()),
|
epoch=int(timezone.now().timestamp()),
|
||||||
@ -481,59 +463,121 @@ class ModuleIssueViewSet(BaseViewSet):
|
|||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class ModuleLinkViewSet(BaseViewSet):
|
class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||||
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectEntityPermission,
|
ProjectEntityPermission,
|
||||||
]
|
]
|
||||||
|
|
||||||
model = ModuleLink
|
|
||||||
serializer_class = ModuleLinkSerializer
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
module_id=self.kwargs.get("module_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return (
|
return (
|
||||||
super()
|
Module.objects.filter(project_id=self.kwargs.get("project_id"))
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
.filter(archived_at__isnull=False)
|
||||||
.filter(module_id=self.kwargs.get("module_id"))
|
.select_related("project")
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
.select_related("workspace")
|
||||||
.order_by("-created_at")
|
.select_related("lead")
|
||||||
.distinct()
|
.prefetch_related("members")
|
||||||
|
.prefetch_related(
|
||||||
|
Prefetch(
|
||||||
|
"link_module",
|
||||||
|
queryset=ModuleLink.objects.select_related(
|
||||||
|
"module", "created_by"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"issue_module",
|
||||||
|
filter=Q(
|
||||||
|
issue_module__issue__archived_at__isnull=True,
|
||||||
|
issue_module__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
distinct=True,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"issue_module__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_module__issue__state__group="completed",
|
||||||
|
issue_module__issue__archived_at__isnull=True,
|
||||||
|
issue_module__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
distinct=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
cancelled_issues=Count(
|
||||||
|
"issue_module__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_module__issue__state__group="cancelled",
|
||||||
|
issue_module__issue__archived_at__isnull=True,
|
||||||
|
issue_module__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
distinct=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_issues=Count(
|
||||||
|
"issue_module__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_module__issue__state__group="started",
|
||||||
|
issue_module__issue__archived_at__isnull=True,
|
||||||
|
issue_module__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
distinct=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
unstarted_issues=Count(
|
||||||
|
"issue_module__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_module__issue__state__group="unstarted",
|
||||||
|
issue_module__issue__archived_at__isnull=True,
|
||||||
|
issue_module__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
distinct=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
backlog_issues=Count(
|
||||||
|
"issue_module__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_module__issue__state__group="backlog",
|
||||||
|
issue_module__issue__archived_at__isnull=True,
|
||||||
|
issue_module__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
distinct=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def get(self, request, slug, project_id):
|
||||||
class ModuleFavoriteViewSet(BaseViewSet):
|
return self.paginate(
|
||||||
serializer_class = ModuleFavoriteSerializer
|
request=request,
|
||||||
model = ModuleFavorite
|
queryset=(self.get_queryset()),
|
||||||
|
on_results=lambda modules: ModuleSerializer(
|
||||||
def get_queryset(self):
|
modules,
|
||||||
return self.filter_queryset(
|
many=True,
|
||||||
super()
|
fields=self.fields,
|
||||||
.get_queryset()
|
expand=self.expand,
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
).data,
|
||||||
.filter(user=self.request.user)
|
|
||||||
.select_related("module")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
def post(self, request, slug, project_id, pk):
|
||||||
serializer = ModuleFavoriteSerializer(data=request.data)
|
module = Module.objects.get(
|
||||||
if serializer.is_valid():
|
pk=pk, project_id=project_id, workspace__slug=slug
|
||||||
serializer.save(user=request.user, project_id=project_id)
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, module_id):
|
|
||||||
module_favorite = ModuleFavorite.objects.get(
|
|
||||||
project=project_id,
|
|
||||||
user=request.user,
|
|
||||||
workspace__slug=slug,
|
|
||||||
module_id=module_id,
|
|
||||||
)
|
)
|
||||||
module_favorite.delete()
|
module.archived_at = timezone.now()
|
||||||
|
module.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
def delete(self, request, slug, project_id, pk):
|
||||||
|
module = Module.objects.get(
|
||||||
|
pk=pk, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
module.archived_at = None
|
||||||
|
module.save()
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
@ -1,298 +0,0 @@
|
|||||||
# Python imports
|
|
||||||
import uuid
|
|
||||||
import requests
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Django imports
|
|
||||||
from django.utils import timezone
|
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
# Third Party modules
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import exceptions
|
|
||||||
from rest_framework.permissions import AllowAny
|
|
||||||
from rest_framework_simplejwt.tokens import RefreshToken
|
|
||||||
from rest_framework import status
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# sso authentication
|
|
||||||
from google.oauth2 import id_token
|
|
||||||
from google.auth.transport import requests as google_auth_request
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.db.models import SocialLoginConnection, User
|
|
||||||
from plane.api.serializers import UserSerializer
|
|
||||||
from .base import BaseAPIView
|
|
||||||
|
|
||||||
|
|
||||||
def get_tokens_for_user(user):
|
|
||||||
refresh = RefreshToken.for_user(user)
|
|
||||||
return (
|
|
||||||
str(refresh.access_token),
|
|
||||||
str(refresh),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_google_token(token, client_id):
|
|
||||||
try:
|
|
||||||
id_info = id_token.verify_oauth2_token(
|
|
||||||
token, google_auth_request.Request(), client_id
|
|
||||||
)
|
|
||||||
email = id_info.get("email")
|
|
||||||
first_name = id_info.get("given_name")
|
|
||||||
last_name = id_info.get("family_name", "")
|
|
||||||
data = {
|
|
||||||
"email": email,
|
|
||||||
"first_name": first_name,
|
|
||||||
"last_name": last_name,
|
|
||||||
}
|
|
||||||
return data
|
|
||||||
except Exception as e:
|
|
||||||
capture_exception(e)
|
|
||||||
raise exceptions.AuthenticationFailed("Error with Google connection.")
|
|
||||||
|
|
||||||
|
|
||||||
def get_access_token(request_token: str, client_id: str) -> str:
|
|
||||||
"""Obtain the request token from github.
|
|
||||||
Given the client id, client secret and request issued out by GitHub, this method
|
|
||||||
should give back an access token
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
CLIENT_ID: str
|
|
||||||
A string representing the client id issued out by github
|
|
||||||
CLIENT_SECRET: str
|
|
||||||
A string representing the client secret issued out by github
|
|
||||||
request_token: str
|
|
||||||
A string representing the request token issued out by github
|
|
||||||
Throws
|
|
||||||
------
|
|
||||||
ValueError:
|
|
||||||
if CLIENT_ID or CLIENT_SECRET or request_token is empty or not a string
|
|
||||||
Returns
|
|
||||||
-------
|
|
||||||
access_token: str
|
|
||||||
A string representing the access token issued out by github
|
|
||||||
"""
|
|
||||||
|
|
||||||
if not request_token:
|
|
||||||
raise ValueError("The request token has to be supplied!")
|
|
||||||
|
|
||||||
CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET")
|
|
||||||
|
|
||||||
url = f"https://github.com/login/oauth/access_token?client_id={client_id}&client_secret={CLIENT_SECRET}&code={request_token}"
|
|
||||||
headers = {"accept": "application/json"}
|
|
||||||
|
|
||||||
res = requests.post(url, headers=headers)
|
|
||||||
|
|
||||||
data = res.json()
|
|
||||||
access_token = data["access_token"]
|
|
||||||
|
|
||||||
return access_token
|
|
||||||
|
|
||||||
|
|
||||||
def get_user_data(access_token: str) -> dict:
|
|
||||||
"""
|
|
||||||
Obtain the user data from github.
|
|
||||||
Given the access token, this method should give back the user data
|
|
||||||
"""
|
|
||||||
if not access_token:
|
|
||||||
raise ValueError("The request token has to be supplied!")
|
|
||||||
if not isinstance(access_token, str):
|
|
||||||
raise ValueError("The request token has to be a string!")
|
|
||||||
|
|
||||||
access_token = "token " + access_token
|
|
||||||
url = "https://api.github.com/user"
|
|
||||||
headers = {"Authorization": access_token}
|
|
||||||
|
|
||||||
resp = requests.get(url=url, headers=headers)
|
|
||||||
|
|
||||||
user_data = resp.json()
|
|
||||||
|
|
||||||
response = requests.get(
|
|
||||||
url="https://api.github.com/user/emails", headers=headers
|
|
||||||
).json()
|
|
||||||
|
|
||||||
_ = [
|
|
||||||
user_data.update({"email": item.get("email")})
|
|
||||||
for item in response
|
|
||||||
if item.get("primary") is True
|
|
||||||
]
|
|
||||||
|
|
||||||
return user_data
|
|
||||||
|
|
||||||
|
|
||||||
class OauthEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [AllowAny]
|
|
||||||
|
|
||||||
def post(self, request):
|
|
||||||
try:
|
|
||||||
medium = request.data.get("medium", False)
|
|
||||||
id_token = request.data.get("credential", False)
|
|
||||||
client_id = request.data.get("clientId", False)
|
|
||||||
|
|
||||||
if not medium or not id_token:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Something went wrong. Please try again later or contact the support team."
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
if medium == "google":
|
|
||||||
data = validate_google_token(id_token, client_id)
|
|
||||||
|
|
||||||
if medium == "github":
|
|
||||||
access_token = get_access_token(id_token, client_id)
|
|
||||||
data = get_user_data(access_token)
|
|
||||||
|
|
||||||
email = data.get("email", None)
|
|
||||||
if email is None:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Something went wrong. Please try again later or contact the support team."
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
if "@" in email:
|
|
||||||
user = User.objects.get(email=email)
|
|
||||||
email = data["email"]
|
|
||||||
mobile_number = uuid.uuid4().hex
|
|
||||||
email_verified = True
|
|
||||||
else:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Something went wrong. Please try again later or contact the support team."
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
## Login Case
|
|
||||||
|
|
||||||
if not user.is_active:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Your account has been deactivated. Please contact your site administrator."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
|
|
||||||
user.last_active = timezone.now()
|
|
||||||
user.last_login_time = timezone.now()
|
|
||||||
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
user.last_login_medium = "oauth"
|
|
||||||
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
|
||||||
user.is_email_verified = email_verified
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
access_token, refresh_token = get_tokens_for_user(user)
|
|
||||||
|
|
||||||
data = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"refresh_token": refresh_token,
|
|
||||||
}
|
|
||||||
|
|
||||||
SocialLoginConnection.objects.update_or_create(
|
|
||||||
medium=medium,
|
|
||||||
extra_data={},
|
|
||||||
user=user,
|
|
||||||
defaults={
|
|
||||||
"token_data": {"id_token": id_token},
|
|
||||||
"last_login_at": timezone.now(),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": f"oauth-{medium}",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_IN",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return Response(data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
except User.DoesNotExist:
|
|
||||||
## Signup Case
|
|
||||||
|
|
||||||
username = uuid.uuid4().hex
|
|
||||||
|
|
||||||
if "@" in email:
|
|
||||||
email = data["email"]
|
|
||||||
mobile_number = uuid.uuid4().hex
|
|
||||||
email_verified = True
|
|
||||||
else:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Something went wrong. Please try again later or contact the support team."
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
user = User(
|
|
||||||
username=username,
|
|
||||||
email=email,
|
|
||||||
mobile_number=mobile_number,
|
|
||||||
first_name=data.get("first_name", ""),
|
|
||||||
last_name=data.get("last_name", ""),
|
|
||||||
is_email_verified=email_verified,
|
|
||||||
is_password_autoset=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
user.set_password(uuid.uuid4().hex)
|
|
||||||
user.is_password_autoset = True
|
|
||||||
user.last_active = timezone.now()
|
|
||||||
user.last_login_time = timezone.now()
|
|
||||||
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
|
||||||
user.last_login_medium = "oauth"
|
|
||||||
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
|
||||||
user.token_updated_at = timezone.now()
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
access_token, refresh_token = get_tokens_for_user(user)
|
|
||||||
data = {
|
|
||||||
"access_token": access_token,
|
|
||||||
"refresh_token": refresh_token,
|
|
||||||
}
|
|
||||||
if settings.ANALYTICS_BASE_API:
|
|
||||||
_ = requests.post(
|
|
||||||
settings.ANALYTICS_BASE_API,
|
|
||||||
headers={
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
|
||||||
},
|
|
||||||
json={
|
|
||||||
"event_id": uuid.uuid4().hex,
|
|
||||||
"event_data": {
|
|
||||||
"medium": f"oauth-{medium}",
|
|
||||||
},
|
|
||||||
"user": {"email": email, "id": str(user.id)},
|
|
||||||
"device_ctx": {
|
|
||||||
"ip": request.META.get("REMOTE_ADDR"),
|
|
||||||
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
|
||||||
},
|
|
||||||
"event_type": "SIGN_UP",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
SocialLoginConnection.objects.update_or_create(
|
|
||||||
medium=medium,
|
|
||||||
extra_data={},
|
|
||||||
user=user,
|
|
||||||
defaults={
|
|
||||||
"token_data": {"id_token": id_token},
|
|
||||||
"last_login_at": timezone.now(),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return Response(data, status=status.HTTP_201_CREATED)
|
|
@ -1,255 +0,0 @@
|
|||||||
# Python imports
|
|
||||||
from datetime import timedelta, date
|
|
||||||
|
|
||||||
# Django imports
|
|
||||||
from django.db.models import Exists, OuterRef, Q, Prefetch
|
|
||||||
from django.utils import timezone
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from .base import BaseViewSet, BaseAPIView
|
|
||||||
from plane.api.permissions import ProjectEntityPermission
|
|
||||||
from plane.db.models import (
|
|
||||||
Page,
|
|
||||||
PageBlock,
|
|
||||||
PageFavorite,
|
|
||||||
Issue,
|
|
||||||
IssueAssignee,
|
|
||||||
IssueActivity,
|
|
||||||
)
|
|
||||||
from plane.api.serializers import (
|
|
||||||
PageSerializer,
|
|
||||||
PageBlockSerializer,
|
|
||||||
PageFavoriteSerializer,
|
|
||||||
IssueLiteSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class PageViewSet(BaseViewSet):
|
|
||||||
serializer_class = PageSerializer
|
|
||||||
model = Page
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
search_fields = [
|
|
||||||
"name",
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
subquery = PageFavorite.objects.filter(
|
|
||||||
user=self.request.user,
|
|
||||||
page_id=OuterRef("pk"),
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
workspace__slug=self.kwargs.get("slug"),
|
|
||||||
)
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
|
||||||
.filter(Q(owned_by=self.request.user) | Q(access=0))
|
|
||||||
.select_related("project")
|
|
||||||
.select_related("workspace")
|
|
||||||
.select_related("owned_by")
|
|
||||||
.annotate(is_favorite=Exists(subquery))
|
|
||||||
.order_by(self.request.GET.get("order_by", "-created_at"))
|
|
||||||
.prefetch_related("labels")
|
|
||||||
.order_by("name", "-is_favorite")
|
|
||||||
.prefetch_related(
|
|
||||||
Prefetch(
|
|
||||||
"blocks",
|
|
||||||
queryset=PageBlock.objects.select_related(
|
|
||||||
"page", "issue", "workspace", "project"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.distinct()
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(
|
|
||||||
project_id=self.kwargs.get("project_id"), owned_by=self.request.user
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
|
||||||
serializer = PageSerializer(
|
|
||||||
data=request.data,
|
|
||||||
context={"project_id": project_id, "owned_by_id": request.user.id},
|
|
||||||
)
|
|
||||||
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save()
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def partial_update(self, request, slug, project_id, pk):
|
|
||||||
page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
|
|
||||||
# Only update access if the page owner is the requesting user
|
|
||||||
if (
|
|
||||||
page.access != request.data.get("access", page.access)
|
|
||||||
and page.owned_by_id != request.user.id
|
|
||||||
):
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Access cannot be updated since this page is owned by someone else"
|
|
||||||
},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
serializer = PageSerializer(page, data=request.data, partial=True)
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save()
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def list(self, request, slug, project_id):
|
|
||||||
queryset = self.get_queryset()
|
|
||||||
page_view = request.GET.get("page_view", False)
|
|
||||||
|
|
||||||
if not page_view:
|
|
||||||
return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
# All Pages
|
|
||||||
if page_view == "all":
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
# Recent pages
|
|
||||||
if page_view == "recent":
|
|
||||||
current_time = date.today()
|
|
||||||
day_before = current_time - timedelta(days=1)
|
|
||||||
todays_pages = queryset.filter(updated_at__date=date.today())
|
|
||||||
yesterdays_pages = queryset.filter(updated_at__date=day_before)
|
|
||||||
earlier_this_week = queryset.filter( updated_at__date__range=(
|
|
||||||
(timezone.now() - timedelta(days=7)),
|
|
||||||
(timezone.now() - timedelta(days=2)),
|
|
||||||
))
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"today": PageSerializer(todays_pages, many=True).data,
|
|
||||||
"yesterday": PageSerializer(yesterdays_pages, many=True).data,
|
|
||||||
"earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
|
|
||||||
},
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Favorite Pages
|
|
||||||
if page_view == "favorite":
|
|
||||||
queryset = queryset.filter(is_favorite=True)
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
# My pages
|
|
||||||
if page_view == "created_by_me":
|
|
||||||
queryset = queryset.filter(owned_by=request.user)
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
# Created by other Pages
|
|
||||||
if page_view == "created_by_other":
|
|
||||||
queryset = queryset.filter(~Q(owned_by=request.user), access=0)
|
|
||||||
return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
|
|
||||||
class PageBlockViewSet(BaseViewSet):
|
|
||||||
serializer_class = PageBlockSerializer
|
|
||||||
model = PageBlock
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
|
||||||
.filter(page_id=self.kwargs.get("page_id"))
|
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
|
||||||
.select_related("project")
|
|
||||||
.select_related("workspace")
|
|
||||||
.select_related("page")
|
|
||||||
.select_related("issue")
|
|
||||||
.order_by("sort_order")
|
|
||||||
.distinct()
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(
|
|
||||||
project_id=self.kwargs.get("project_id"),
|
|
||||||
page_id=self.kwargs.get("page_id"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class PageFavoriteViewSet(BaseViewSet):
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
|
|
||||||
serializer_class = PageFavoriteSerializer
|
|
||||||
model = PageFavorite
|
|
||||||
|
|
||||||
def get_queryset(self):
|
|
||||||
return self.filter_queryset(
|
|
||||||
super()
|
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(user=self.request.user)
|
|
||||||
.select_related("page", "page__owned_by")
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
|
||||||
serializer = PageFavoriteSerializer(data=request.data)
|
|
||||||
if serializer.is_valid():
|
|
||||||
serializer.save(user=request.user, project_id=project_id)
|
|
||||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, page_id):
|
|
||||||
page_favorite = PageFavorite.objects.get(
|
|
||||||
project=project_id,
|
|
||||||
user=request.user,
|
|
||||||
workspace__slug=slug,
|
|
||||||
page_id=page_id,
|
|
||||||
)
|
|
||||||
page_favorite.delete()
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
class CreateIssueFromPageBlockEndpoint(BaseAPIView):
|
|
||||||
permission_classes = [
|
|
||||||
ProjectEntityPermission,
|
|
||||||
]
|
|
||||||
|
|
||||||
def post(self, request, slug, project_id, page_id, page_block_id):
|
|
||||||
page_block = PageBlock.objects.get(
|
|
||||||
pk=page_block_id,
|
|
||||||
workspace__slug=slug,
|
|
||||||
project_id=project_id,
|
|
||||||
page_id=page_id,
|
|
||||||
)
|
|
||||||
issue = Issue.objects.create(
|
|
||||||
name=page_block.name,
|
|
||||||
project_id=project_id,
|
|
||||||
description=page_block.description,
|
|
||||||
description_html=page_block.description_html,
|
|
||||||
description_stripped=page_block.description_stripped,
|
|
||||||
)
|
|
||||||
_ = IssueAssignee.objects.create(
|
|
||||||
issue=issue, assignee=request.user, project_id=project_id
|
|
||||||
)
|
|
||||||
|
|
||||||
_ = IssueActivity.objects.create(
|
|
||||||
issue=issue,
|
|
||||||
actor=request.user,
|
|
||||||
project_id=project_id,
|
|
||||||
comment=f"created the issue from {page_block.name} block",
|
|
||||||
verb="created",
|
|
||||||
)
|
|
||||||
|
|
||||||
page_block.issue = issue
|
|
||||||
page_block.save()
|
|
||||||
|
|
||||||
return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
|
|
File diff suppressed because it is too large
Load Diff
@ -1,93 +1,161 @@
|
|||||||
# Python imports
|
|
||||||
from itertools import groupby
|
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.db.models import Q
|
from django.db import IntegrityError
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from sentry_sdk import capture_exception
|
from rest_framework.response import Response
|
||||||
|
|
||||||
|
from plane.api.serializers import StateSerializer
|
||||||
|
from plane.app.permissions import ProjectEntityPermission
|
||||||
|
from plane.db.models import Issue, State
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from . import BaseViewSet, BaseAPIView
|
from .base import BaseAPIView
|
||||||
from plane.api.serializers import StateSerializer
|
|
||||||
from plane.api.permissions import ProjectEntityPermission
|
|
||||||
from plane.db.models import State, Issue
|
|
||||||
|
|
||||||
|
|
||||||
class StateViewSet(BaseViewSet):
|
class StateAPIEndpoint(BaseAPIView):
|
||||||
serializer_class = StateSerializer
|
serializer_class = StateSerializer
|
||||||
model = State
|
model = State
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
ProjectEntityPermission,
|
ProjectEntityPermission,
|
||||||
]
|
]
|
||||||
|
|
||||||
def perform_create(self, serializer):
|
|
||||||
serializer.save(project_id=self.kwargs.get("project_id"))
|
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.filter_queryset(
|
return (
|
||||||
super()
|
State.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
.filter(project_id=self.kwargs.get("project_id"))
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
.filter(
|
||||||
.filter(~Q(name="Triage"))
|
project__project_projectmember__member=self.request.user,
|
||||||
|
project__project_projectmember__is_active=True,
|
||||||
|
)
|
||||||
|
.filter(is_triage=False)
|
||||||
|
.filter(project__archived_at__isnull=True)
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
.distinct()
|
.distinct()
|
||||||
)
|
)
|
||||||
|
|
||||||
def create(self, request, slug, project_id):
|
def post(self, request, slug, project_id):
|
||||||
serializer = StateSerializer(data=request.data)
|
try:
|
||||||
if serializer.is_valid():
|
serializer = StateSerializer(
|
||||||
serializer.save(project_id=project_id)
|
data=request.data, context={"project_id": project_id}
|
||||||
|
)
|
||||||
|
if serializer.is_valid():
|
||||||
|
if (
|
||||||
|
request.data.get("external_id")
|
||||||
|
and request.data.get("external_source")
|
||||||
|
and State.objects.filter(
|
||||||
|
project_id=project_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
external_source=request.data.get("external_source"),
|
||||||
|
external_id=request.data.get("external_id"),
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
state = State.objects.filter(
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
external_id=request.data.get("external_id"),
|
||||||
|
external_source=request.data.get("external_source"),
|
||||||
|
).first()
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "State with the same external id and external source already exists",
|
||||||
|
"id": str(state.id),
|
||||||
|
},
|
||||||
|
status=status.HTTP_409_CONFLICT,
|
||||||
|
)
|
||||||
|
|
||||||
|
serializer.save(project_id=project_id)
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
return Response(
|
||||||
|
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
except IntegrityError:
|
||||||
|
state = State.objects.filter(
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
name=request.data.get("name"),
|
||||||
|
).first()
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "State with the same name already exists in the project",
|
||||||
|
"id": str(state.id),
|
||||||
|
},
|
||||||
|
status=status.HTTP_409_CONFLICT,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get(self, request, slug, project_id, state_id=None):
|
||||||
|
if state_id:
|
||||||
|
serializer = StateSerializer(
|
||||||
|
self.get_queryset().get(pk=state_id),
|
||||||
|
fields=self.fields,
|
||||||
|
expand=self.expand,
|
||||||
|
)
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return self.paginate(
|
||||||
|
request=request,
|
||||||
|
queryset=(self.get_queryset()),
|
||||||
|
on_results=lambda states: StateSerializer(
|
||||||
|
states,
|
||||||
|
many=True,
|
||||||
|
fields=self.fields,
|
||||||
|
expand=self.expand,
|
||||||
|
).data,
|
||||||
|
)
|
||||||
|
|
||||||
def list(self, request, slug, project_id):
|
def delete(self, request, slug, project_id, state_id):
|
||||||
states = StateSerializer(self.get_queryset(), many=True).data
|
|
||||||
grouped = request.GET.get("grouped", False)
|
|
||||||
if grouped == "true":
|
|
||||||
state_dict = {}
|
|
||||||
for key, value in groupby(
|
|
||||||
sorted(states, key=lambda state: state["group"]),
|
|
||||||
lambda state: state.get("group"),
|
|
||||||
):
|
|
||||||
state_dict[str(key)] = list(value)
|
|
||||||
return Response(state_dict, status=status.HTTP_200_OK)
|
|
||||||
return Response(states, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
def mark_as_default(self, request, slug, project_id, pk):
|
|
||||||
# Select all the states which are marked as default
|
|
||||||
_ = State.objects.filter(
|
|
||||||
workspace__slug=slug, project_id=project_id, default=True
|
|
||||||
).update(default=False)
|
|
||||||
_ = State.objects.filter(
|
|
||||||
workspace__slug=slug, project_id=project_id, pk=pk
|
|
||||||
).update(default=True)
|
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, pk):
|
|
||||||
state = State.objects.get(
|
state = State.objects.get(
|
||||||
~Q(name="Triage"),
|
is_triage=False,
|
||||||
pk=pk,
|
pk=state_id,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
)
|
)
|
||||||
|
|
||||||
if state.default:
|
if state.default:
|
||||||
return Response({"error": "Default state cannot be deleted"}, status=False)
|
return Response(
|
||||||
|
{"error": "Default state cannot be deleted"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
# Check for any issues in the state
|
# Check for any issues in the state
|
||||||
issue_exist = Issue.issue_objects.filter(state=pk).exists()
|
issue_exist = Issue.issue_objects.filter(state=state_id).exists()
|
||||||
|
|
||||||
if issue_exist:
|
if issue_exist:
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "The state is not empty, only empty states can be deleted"},
|
{
|
||||||
|
"error": "The state is not empty, only empty states can be deleted"
|
||||||
|
},
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
state.delete()
|
state.delete()
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
def patch(self, request, slug, project_id, state_id=None):
|
||||||
|
state = State.objects.get(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk=state_id
|
||||||
|
)
|
||||||
|
serializer = StateSerializer(state, data=request.data, partial=True)
|
||||||
|
if serializer.is_valid():
|
||||||
|
if (
|
||||||
|
str(request.data.get("external_id"))
|
||||||
|
and (state.external_id != str(request.data.get("external_id")))
|
||||||
|
and State.objects.filter(
|
||||||
|
project_id=project_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
external_source=request.data.get(
|
||||||
|
"external_source", state.external_source
|
||||||
|
),
|
||||||
|
external_id=request.data.get("external_id"),
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "State with the same external id and external source already exists",
|
||||||
|
"id": str(state.id),
|
||||||
|
},
|
||||||
|
status=status.HTTP_409_CONFLICT,
|
||||||
|
)
|
||||||
|
serializer.save()
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
@ -1,73 +0,0 @@
|
|||||||
# Third party imports
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
|
||||||
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.api.serializers import (
|
|
||||||
UserSerializer,
|
|
||||||
IssueActivitySerializer,
|
|
||||||
UserMeSerializer,
|
|
||||||
UserMeSettingsSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
from plane.api.views.base import BaseViewSet, BaseAPIView
|
|
||||||
from plane.db.models import (
|
|
||||||
User,
|
|
||||||
Workspace,
|
|
||||||
WorkspaceMemberInvite,
|
|
||||||
Issue,
|
|
||||||
IssueActivity,
|
|
||||||
)
|
|
||||||
from plane.utils.paginator import BasePaginator
|
|
||||||
|
|
||||||
|
|
||||||
class UserEndpoint(BaseViewSet):
|
|
||||||
serializer_class = UserSerializer
|
|
||||||
model = User
|
|
||||||
|
|
||||||
def get_object(self):
|
|
||||||
return self.request.user
|
|
||||||
|
|
||||||
def retrieve(self, request):
|
|
||||||
serialized_data = UserMeSerializer(request.user).data
|
|
||||||
return Response(
|
|
||||||
serialized_data,
|
|
||||||
status=status.HTTP_200_OK,
|
|
||||||
)
|
|
||||||
|
|
||||||
def retrieve_user_settings(self, request):
|
|
||||||
serialized_data = UserMeSettingsSerializer(request.user).data
|
|
||||||
return Response(serialized_data, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateUserOnBoardedEndpoint(BaseAPIView):
|
|
||||||
def patch(self, request):
|
|
||||||
user = User.objects.get(pk=request.user.id)
|
|
||||||
user.is_onboarded = request.data.get("is_onboarded", False)
|
|
||||||
user.save()
|
|
||||||
return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateUserTourCompletedEndpoint(BaseAPIView):
|
|
||||||
def patch(self, request):
|
|
||||||
user = User.objects.get(pk=request.user.id)
|
|
||||||
user.is_tour_completed = request.data.get("is_tour_completed", False)
|
|
||||||
user.save()
|
|
||||||
return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
|
|
||||||
class UserActivityEndpoint(BaseAPIView, BasePaginator):
|
|
||||||
def get(self, request, slug):
|
|
||||||
queryset = IssueActivity.objects.filter(
|
|
||||||
actor=request.user, workspace__slug=slug
|
|
||||||
).select_related("actor", "workspace", "issue", "project")
|
|
||||||
|
|
||||||
return self.paginate(
|
|
||||||
request=request,
|
|
||||||
queryset=queryset,
|
|
||||||
on_results=lambda issue_activities: IssueActivitySerializer(
|
|
||||||
issue_activities, many=True
|
|
||||||
).data,
|
|
||||||
)
|
|
File diff suppressed because it is too large
Load Diff
0
apiserver/plane/app/__init__.py
Normal file
0
apiserver/plane/app/__init__.py
Normal file
5
apiserver/plane/app/apps.py
Normal file
5
apiserver/plane/app/apps.py
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
|
class AppApiConfig(AppConfig):
|
||||||
|
name = "plane.app"
|
0
apiserver/plane/app/middleware/__init__.py
Normal file
0
apiserver/plane/app/middleware/__init__.py
Normal file
50
apiserver/plane/app/middleware/api_authentication.py
Normal file
50
apiserver/plane/app/middleware/api_authentication.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
# Django imports
|
||||||
|
from django.utils import timezone
|
||||||
|
from django.db.models import Q
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from rest_framework import authentication
|
||||||
|
from rest_framework.exceptions import AuthenticationFailed
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from plane.db.models import APIToken
|
||||||
|
|
||||||
|
|
||||||
|
class APIKeyAuthentication(authentication.BaseAuthentication):
|
||||||
|
"""
|
||||||
|
Authentication with an API Key
|
||||||
|
"""
|
||||||
|
|
||||||
|
www_authenticate_realm = "api"
|
||||||
|
media_type = "application/json"
|
||||||
|
auth_header_name = "X-Api-Key"
|
||||||
|
|
||||||
|
def get_api_token(self, request):
|
||||||
|
return request.headers.get(self.auth_header_name)
|
||||||
|
|
||||||
|
def validate_api_token(self, token):
|
||||||
|
try:
|
||||||
|
api_token = APIToken.objects.get(
|
||||||
|
Q(
|
||||||
|
Q(expired_at__gt=timezone.now())
|
||||||
|
| Q(expired_at__isnull=True)
|
||||||
|
),
|
||||||
|
token=token,
|
||||||
|
is_active=True,
|
||||||
|
)
|
||||||
|
except APIToken.DoesNotExist:
|
||||||
|
raise AuthenticationFailed("Given API token is not valid")
|
||||||
|
|
||||||
|
# save api token last used
|
||||||
|
api_token.last_used = timezone.now()
|
||||||
|
api_token.save(update_fields=["last_used"])
|
||||||
|
return (api_token.user, api_token.token)
|
||||||
|
|
||||||
|
def authenticate(self, request):
|
||||||
|
token = self.get_api_token(request=request)
|
||||||
|
if not token:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Validate the API token
|
||||||
|
user, token = self.validate_api_token(token)
|
||||||
|
return user, token
|
14
apiserver/plane/app/permissions/__init__.py
Normal file
14
apiserver/plane/app/permissions/__init__.py
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
from .workspace import (
|
||||||
|
WorkSpaceBasePermission,
|
||||||
|
WorkspaceOwnerPermission,
|
||||||
|
WorkSpaceAdminPermission,
|
||||||
|
WorkspaceEntityPermission,
|
||||||
|
WorkspaceViewerPermission,
|
||||||
|
WorkspaceUserPermission,
|
||||||
|
)
|
||||||
|
from .project import (
|
||||||
|
ProjectBasePermission,
|
||||||
|
ProjectEntityPermission,
|
||||||
|
ProjectMemberPermission,
|
||||||
|
ProjectLitePermission,
|
||||||
|
)
|
@ -1,8 +1,8 @@
|
|||||||
# Third Party imports
|
# Third Party imports
|
||||||
from rest_framework.permissions import BasePermission, SAFE_METHODS
|
from rest_framework.permissions import SAFE_METHODS, BasePermission
|
||||||
|
|
||||||
# Module import
|
# Module import
|
||||||
from plane.db.models import WorkspaceMember, ProjectMember
|
from plane.db.models import ProjectMember, WorkspaceMember
|
||||||
|
|
||||||
# Permission Mappings
|
# Permission Mappings
|
||||||
Admin = 20
|
Admin = 20
|
||||||
@ -13,14 +13,15 @@ Guest = 5
|
|||||||
|
|
||||||
class ProjectBasePermission(BasePermission):
|
class ProjectBasePermission(BasePermission):
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
|
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
## Safe Methods -> Handle the filtering logic in queryset
|
## Safe Methods -> Handle the filtering logic in queryset
|
||||||
if request.method in SAFE_METHODS:
|
if request.method in SAFE_METHODS:
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
workspace__slug=view.workspace_slug, member=request.user
|
workspace__slug=view.workspace_slug,
|
||||||
|
member=request.user,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only workspace owners or admins can create the projects
|
## Only workspace owners or admins can create the projects
|
||||||
@ -29,6 +30,7 @@ class ProjectBasePermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only Project Admins can update project attributes
|
## Only Project Admins can update project attributes
|
||||||
@ -37,19 +39,21 @@ class ProjectBasePermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
role=Admin,
|
role=Admin,
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
class ProjectMemberPermission(BasePermission):
|
class ProjectMemberPermission(BasePermission):
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
|
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
## Safe Methods -> Handle the filtering logic in queryset
|
## Safe Methods -> Handle the filtering logic in queryset
|
||||||
if request.method in SAFE_METHODS:
|
if request.method in SAFE_METHODS:
|
||||||
return ProjectMember.objects.filter(
|
return ProjectMember.objects.filter(
|
||||||
workspace__slug=view.workspace_slug, member=request.user
|
workspace__slug=view.workspace_slug,
|
||||||
|
member=request.user,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
## Only workspace owners or admins can create the projects
|
## Only workspace owners or admins can create the projects
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
@ -57,6 +61,7 @@ class ProjectMemberPermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only Project Admins can update project attributes
|
## Only Project Admins can update project attributes
|
||||||
@ -65,12 +70,12 @@ class ProjectMemberPermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
class ProjectEntityPermission(BasePermission):
|
class ProjectEntityPermission(BasePermission):
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
|
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -80,6 +85,7 @@ class ProjectEntityPermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only project members or admins can create and edit the project attributes
|
## Only project members or admins can create and edit the project attributes
|
||||||
@ -88,11 +94,11 @@ class ProjectEntityPermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
class ProjectLitePermission(BasePermission):
|
class ProjectLitePermission(BasePermission):
|
||||||
|
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
@ -101,4 +107,5 @@ class ProjectLitePermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
@ -32,15 +32,31 @@ class WorkSpaceBasePermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
role__in=[Owner, Admin],
|
role__in=[Owner, Admin],
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
# allow only owner to delete the workspace
|
# allow only owner to delete the workspace
|
||||||
if request.method == "DELETE":
|
if request.method == "DELETE":
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
member=request.user, workspace__slug=view.workspace_slug, role=Owner
|
member=request.user,
|
||||||
|
workspace__slug=view.workspace_slug,
|
||||||
|
role=Owner,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceOwnerPermission(BasePermission):
|
||||||
|
def has_permission(self, request, view):
|
||||||
|
if request.user.is_anonymous:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return WorkspaceMember.objects.filter(
|
||||||
|
workspace__slug=view.workspace_slug,
|
||||||
|
member=request.user,
|
||||||
|
role=Owner,
|
||||||
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
class WorkSpaceAdminPermission(BasePermission):
|
class WorkSpaceAdminPermission(BasePermission):
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
@ -50,6 +66,7 @@ class WorkSpaceAdminPermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
role__in=[Owner, Admin],
|
role__in=[Owner, Admin],
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
@ -63,12 +80,14 @@ class WorkspaceEntityPermission(BasePermission):
|
|||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
member=request.user,
|
member=request.user,
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
role__in=[Owner, Admin],
|
role__in=[Owner, Admin],
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
@ -78,5 +97,19 @@ class WorkspaceViewerPermission(BasePermission):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
member=request.user, workspace__slug=view.workspace_slug, role__gte=10
|
member=request.user,
|
||||||
|
workspace__slug=view.workspace_slug,
|
||||||
|
is_active=True,
|
||||||
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceUserPermission(BasePermission):
|
||||||
|
def has_permission(self, request, view):
|
||||||
|
if request.user.is_anonymous:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return WorkspaceMember.objects.filter(
|
||||||
|
member=request.user,
|
||||||
|
workspace__slug=view.workspace_slug,
|
||||||
|
is_active=True,
|
||||||
).exists()
|
).exists()
|
125
apiserver/plane/app/serializers/__init__.py
Normal file
125
apiserver/plane/app/serializers/__init__.py
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
from .base import BaseSerializer
|
||||||
|
from .user import (
|
||||||
|
UserSerializer,
|
||||||
|
UserLiteSerializer,
|
||||||
|
ChangePasswordSerializer,
|
||||||
|
ResetPasswordSerializer,
|
||||||
|
UserAdminLiteSerializer,
|
||||||
|
UserMeSerializer,
|
||||||
|
UserMeSettingsSerializer,
|
||||||
|
)
|
||||||
|
from .workspace import (
|
||||||
|
WorkSpaceSerializer,
|
||||||
|
WorkSpaceMemberSerializer,
|
||||||
|
TeamSerializer,
|
||||||
|
WorkSpaceMemberInviteSerializer,
|
||||||
|
WorkspaceLiteSerializer,
|
||||||
|
WorkspaceThemeSerializer,
|
||||||
|
WorkspaceMemberAdminSerializer,
|
||||||
|
WorkspaceMemberMeSerializer,
|
||||||
|
WorkspaceUserPropertiesSerializer,
|
||||||
|
)
|
||||||
|
from .project import (
|
||||||
|
ProjectSerializer,
|
||||||
|
ProjectListSerializer,
|
||||||
|
ProjectDetailSerializer,
|
||||||
|
ProjectMemberSerializer,
|
||||||
|
ProjectMemberInviteSerializer,
|
||||||
|
ProjectIdentifierSerializer,
|
||||||
|
ProjectFavoriteSerializer,
|
||||||
|
ProjectLiteSerializer,
|
||||||
|
ProjectMemberLiteSerializer,
|
||||||
|
ProjectDeployBoardSerializer,
|
||||||
|
ProjectMemberAdminSerializer,
|
||||||
|
ProjectPublicMemberSerializer,
|
||||||
|
ProjectMemberRoleSerializer,
|
||||||
|
)
|
||||||
|
from .state import StateSerializer, StateLiteSerializer
|
||||||
|
from .view import (
|
||||||
|
GlobalViewSerializer,
|
||||||
|
IssueViewSerializer,
|
||||||
|
IssueViewFavoriteSerializer,
|
||||||
|
)
|
||||||
|
from .cycle import (
|
||||||
|
CycleSerializer,
|
||||||
|
CycleIssueSerializer,
|
||||||
|
CycleFavoriteSerializer,
|
||||||
|
CycleWriteSerializer,
|
||||||
|
CycleUserPropertiesSerializer,
|
||||||
|
)
|
||||||
|
from .asset import FileAssetSerializer
|
||||||
|
from .issue import (
|
||||||
|
IssueCreateSerializer,
|
||||||
|
IssueActivitySerializer,
|
||||||
|
IssueCommentSerializer,
|
||||||
|
IssuePropertySerializer,
|
||||||
|
IssueAssigneeSerializer,
|
||||||
|
LabelSerializer,
|
||||||
|
IssueSerializer,
|
||||||
|
IssueFlatSerializer,
|
||||||
|
IssueStateSerializer,
|
||||||
|
IssueLinkSerializer,
|
||||||
|
IssueInboxSerializer,
|
||||||
|
IssueLiteSerializer,
|
||||||
|
IssueAttachmentSerializer,
|
||||||
|
IssueSubscriberSerializer,
|
||||||
|
IssueReactionSerializer,
|
||||||
|
CommentReactionSerializer,
|
||||||
|
IssueVoteSerializer,
|
||||||
|
IssueRelationSerializer,
|
||||||
|
RelatedIssueSerializer,
|
||||||
|
IssuePublicSerializer,
|
||||||
|
IssueDetailSerializer,
|
||||||
|
IssueReactionLiteSerializer,
|
||||||
|
IssueAttachmentLiteSerializer,
|
||||||
|
IssueLinkLiteSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .module import (
|
||||||
|
ModuleDetailSerializer,
|
||||||
|
ModuleWriteSerializer,
|
||||||
|
ModuleSerializer,
|
||||||
|
ModuleIssueSerializer,
|
||||||
|
ModuleLinkSerializer,
|
||||||
|
ModuleFavoriteSerializer,
|
||||||
|
ModuleUserPropertiesSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .api import APITokenSerializer, APITokenReadSerializer
|
||||||
|
|
||||||
|
from .importer import ImporterSerializer
|
||||||
|
|
||||||
|
from .page import (
|
||||||
|
PageSerializer,
|
||||||
|
PageLogSerializer,
|
||||||
|
SubPageSerializer,
|
||||||
|
PageFavoriteSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .estimate import (
|
||||||
|
EstimateSerializer,
|
||||||
|
EstimatePointSerializer,
|
||||||
|
EstimateReadSerializer,
|
||||||
|
WorkspaceEstimateSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .inbox import (
|
||||||
|
InboxSerializer,
|
||||||
|
InboxIssueSerializer,
|
||||||
|
IssueStateInboxSerializer,
|
||||||
|
InboxIssueLiteSerializer,
|
||||||
|
InboxIssueDetailSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .analytic import AnalyticViewSerializer
|
||||||
|
|
||||||
|
from .notification import (
|
||||||
|
NotificationSerializer,
|
||||||
|
UserNotificationPreferenceSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .exporter import ExporterHistorySerializer
|
||||||
|
|
||||||
|
from .webhook import WebhookSerializer, WebhookLogSerializer
|
||||||
|
|
||||||
|
from .dashboard import DashboardSerializer, WidgetSerializer
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user