forked from makeplane/plane
Compare commits
897 Commits
qa-preview...preview
[Commit list: 897 commits in this range. Only the bare commit SHAs were captured — the Author, message, and Date columns of the compare view's commit table are empty in this export — so the listing is collapsed to this placeholder.]

.deepsource.toml (deleted; filename inferred from the DeepSource-specific keys below)
@@ -1,23 +0,0 @@
-version = 1
-
-exclude_patterns = [
-  "bin/**",
-  "**/node_modules/",
-  "**/*.min.js"
-]
-
-[[analyzers]]
-name = "shell"
-
-[[analyzers]]
-name = "javascript"
-
-  [analyzers.meta]
-  plugins = ["react"]
-  environment = ["nodejs"]
-
-[[analyzers]]
-name = "python"
-
-  [analyzers.meta]
-  runtime_version = "3.x.x"

10 .env.example
@@ -1,14 +1,12 @@
 # Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_DB="plane"
+PGDATA="/var/lib/postgresql/data"

 # Redis Settings
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
-REDIS_URL="redis://${REDIS_HOST}:6379/"

 # AWS Settings
 AWS_REGION=""
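
The diff drops the composed DATABASE_URL and REDIS_URL entries in favor of discrete POSTGRES_* and REDIS_* values. A minimal shell sketch of how a deployment script could still derive the old-style URLs from the new variables — the `plane-db` host is carried over from the removed PGHOST default, so treat it as an assumption:

    # Hypothetical helper, not part of the diff: rebuild the removed URLs
    # from the new discrete settings shown above.
    export POSTGRES_USER="plane"
    export POSTGRES_PASSWORD="plane"
    export POSTGRES_DB="plane"
    export DATABASE_URL="postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@plane-db/${POSTGRES_DB}"
    export REDIS_URL="redis://plane-redis:6379/"
    echo "$DATABASE_URL"   # postgresql://plane:plane@plane-db/plane
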

5 .github/ISSUE_TEMPLATE/--bug-report.yaml
@@ -1,7 +1,8 @@
 name: Bug report
 description: Create a bug report to help us improve Plane
 title: "[bug]: "
-labels: [bug, need testing]
+labels: [🐛bug]
+assignees: [srinivaspendem, pushya22]
 body:
   - type: markdown
     attributes:
@@ -44,7 +45,7 @@ body:
         - Deploy preview
     validations:
       required: true
-    type: dropdown
+  - type: dropdown
     id: browser
     attributes:
       label: Browser

.github/ISSUE_TEMPLATE/--feature-request.yaml
@@ -1,7 +1,8 @@
 name: Feature request
 description: Suggest a feature to improve Plane
 title: "[feature]: "
-labels: [feature]
+labels: [✨feature]
+assignees: [srinivaspendem, pushya22]
 body:
   - type: markdown
     attributes:

84 .github/workflows/auto-merge.yml (new file)
@@ -0,0 +1,84 @@
name: Auto Merge or Create PR on Push

on:
  workflow_dispatch:
  push:
    branches:
      - "sync/**"

env:
  CURRENT_BRANCH: ${{ github.ref_name }}
  SOURCE_BRANCH: ${{ secrets.SYNC_SOURCE_BRANCH_NAME }} # The sync branch such as "sync/ce"
  TARGET_BRANCH: ${{ secrets.SYNC_TARGET_BRANCH_NAME }} # The target branch that you would like to merge changes like develop
  GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows
  REVIEWER: ${{ secrets.SYNC_PR_REVIEWER }}

jobs:
  Check_Branch:
    runs-on: ubuntu-latest
    outputs:
      BRANCH_MATCH: ${{ steps.check-branch.outputs.MATCH }}
    steps:
      - name: Check if current branch matches the secret
        id: check-branch
        run: |
          if [ "$CURRENT_BRANCH" = "$SOURCE_BRANCH" ]; then
            echo "MATCH=true" >> $GITHUB_OUTPUT
          else
            echo "MATCH=false" >> $GITHUB_OUTPUT
          fi

  Auto_Merge:
    if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }}
    needs: [Check_Branch]
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      contents: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4.1.1
        with:
          fetch-depth: 0 # Fetch all history for all branches and tags

      - name: Setup Git
        run: |
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"

      - name: Setup GH CLI and Git Config
        run: |
          type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
          curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
          sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
          echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
          sudo apt update
          sudo apt install gh -y

      - name: Check for merge conflicts
        id: conflicts
        run: |
          git fetch origin $TARGET_BRANCH
          git checkout $TARGET_BRANCH
          # Attempt to merge the main branch into the current branch
          if $(git merge --no-commit --no-ff $SOURCE_BRANCH); then
            echo "No merge conflicts detected."
            echo "HAS_CONFLICTS=false" >> $GITHUB_ENV
          else
            echo "Merge conflicts detected."
            echo "HAS_CONFLICTS=true" >> $GITHUB_ENV
            git merge --abort
          fi

      - name: Merge Change to Target Branch
        if: env.HAS_CONFLICTS == 'false'
        run: |
          git commit -m "Merge branch '$SOURCE_BRANCH' into $TARGET_BRANCH"
          git push origin $TARGET_BRANCH

      - name: Create PR to Target Branch
        if: env.HAS_CONFLICTS == 'true'
        run: |
          # Replace 'username' with the actual GitHub username of the reviewer.
          PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: merge conflicts need to be resolved" --body "" --reviewer $REVIEWER)
          echo "Pull Request created: $PR_URL"
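
The workflow above is driven entirely by repository secrets. A hedged sketch of how those secrets could be seeded with the GitHub CLI — the branch values are illustrative (the `sync/ce` and `develop` examples come from the workflow's own comments), and the exact token scopes are an assumption:

    # Illustrative only: seed the secrets the auto-merge workflow reads.
    gh secret set SYNC_SOURCE_BRANCH_NAME --body "sync/ce"
    gh secret set SYNC_TARGET_BRANCH_NAME --body "develop"
    gh secret set SYNC_PR_REVIEWER --body "reviewer-username"   # hypothetical reviewer
    gh secret set ACCESS_TOKEN --body "<PAT with contents and workflows access>"
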

239 .github/workflows/build-branch.yml
@@ -1,98 +1,122 @@
 name: Branch Build

 on:
-  pull_request:
-    types:
-      - closed
+  workflow_dispatch:
+  push:
     branches:
       - master
       - preview
-      - qa
-      - develop
   release:
     types: [released, prereleased]

 env:
-  TARGET_BRANCH: ${{ github.event.pull_request.base.ref || github.event.release.target_commitish }}
+  TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}

 jobs:
   branch_build_setup:
-    if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) || github.event_name == 'release' }}
     name: Build-Push Web/Space/API/Proxy Docker Image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
+    outputs:
+      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
+      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
+      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
+      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
+      build_frontend: ${{ steps.changed_files.outputs.frontend_any_changed }}
+      build_space: ${{ steps.changed_files.outputs.space_any_changed }}
+      build_backend: ${{ steps.changed_files.outputs.backend_any_changed }}
+      build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}

     steps:
-      - name: Check out the repo
-        uses: actions/checkout@v3.3.0
+      - id: set_env_variables
+        name: Set Environment Variables
+        run: |
+          if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
+            echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
+          else
+            echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+            echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+            echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+            echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+          fi
+          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT

-      - name: Uploading Proxy Source
-        uses: actions/upload-artifact@v3
-        with:
-          name: proxy-src-code
-          path: ./nginx
-      - name: Uploading Backend Source
-        uses: actions/upload-artifact@v3
-        with:
-          name: backend-src-code
-          path: ./apiserver
-      - name: Uploading Web Source
-        uses: actions/upload-artifact@v3
-        with:
-          name: web-src-code
-          path: |
-            ./
-            !./apiserver
-            !./nginx
-            !./deploy
-            !./space
-      - name: Uploading Space Source
-        uses: actions/upload-artifact@v3
-        with:
-          name: space-src-code
-          path: |
-            ./
-            !./apiserver
-            !./nginx
-            !./deploy
-            !./web
-    outputs:
-      gh_branch_name: ${{ env.TARGET_BRANCH }}
+      - id: checkout_files
+        name: Checkout Files
+        uses: actions/checkout@v4
+
+      - name: Get changed files
+        id: changed_files
+        uses: tj-actions/changed-files@v42
+        with:
+          files_yaml: |
+            frontend:
+              - web/**
+              - packages/**
+              - 'package.json'
+              - 'yarn.lock'
+              - 'tsconfig.json'
+              - 'turbo.json'
+            space:
+              - space/**
+              - packages/**
+              - 'package.json'
+              - 'yarn.lock'
+              - 'tsconfig.json'
+              - 'turbo.json'
+            backend:
+              - apiserver/**
+            proxy:
+              - nginx/**

   branch_build_push_frontend:
+    if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
       FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
       - name: Set Frontend Docker Tag
         run: |
-          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
-          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable
+          if [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest
           else
             TAG=${{ env.FRONTEND_TAG }}
           fi
           echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2.5.0
       - name: Login to Docker Hub
-        uses: docker/login-action@v2.1.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Downloading Web Source Code
-        uses: actions/download-artifact@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
         with:
-          name: web-src-code
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
       - name: Build and Push Frontend to Docker Container Registry
-        uses: docker/build-push-action@v4.0.0
+        uses: docker/build-push-action@v5.1.0
         with:
           context: .
           file: ./web/Dockerfile.web
-          platforms: linux/amd64
+          platforms: ${{ env.BUILDX_PLATFORMS }}
           tags: ${{ env.FRONTEND_TAG }}
           push: true
         env:
@@ -101,40 +125,50 @@ jobs:
       DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

   branch_build_push_space:
+    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
       SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
       - name: Set Space Docker Tag
         run: |
-          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
-          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable
+          if [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
           else
             TAG=${{ env.SPACE_TAG }}
           fi
           echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2.5.0
       - name: Login to Docker Hub
-        uses: docker/login-action@v2.1.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Downloading Space Source Code
-        uses: actions/download-artifact@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
         with:
-          name: space-src-code
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
       - name: Build and Push Space to Docker Hub
-        uses: docker/build-push-action@v4.0.0
+        uses: docker/build-push-action@v5.1.0
         with:
           context: .
           file: ./space/Dockerfile.space
-          platforms: linux/amd64
+          platforms: ${{ env.BUILDX_PLATFORMS }}
           tags: ${{ env.SPACE_TAG }}
           push: true
         env:
@@ -143,40 +177,50 @@ jobs:
       DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

   branch_build_push_backend:
+    if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
       BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
       - name: Set Backend Docker Tag
         run: |
-          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
-          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable
+          if [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest
           else
             TAG=${{ env.BACKEND_TAG }}
           fi
           echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2.5.0
       - name: Login to Docker Hub
-        uses: docker/login-action@v2.1.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Downloading Backend Source Code
-        uses: actions/download-artifact@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
         with:
-          name: backend-src-code
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
       - name: Build and Push Backend to Docker Hub
-        uses: docker/build-push-action@v4.0.0
+        uses: docker/build-push-action@v5.1.0
         with:
-          context: .
-          file: ./Dockerfile.api
-          platforms: linux/amd64
+          context: ./apiserver
+          file: ./apiserver/Dockerfile.api
+          platforms: ${{ env.BUILDX_PLATFORMS }}
           push: true
           tags: ${{ env.BACKEND_TAG }}
         env:
@@ -185,41 +229,50 @@ jobs:
       DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

   branch_build_push_proxy:
+    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
       PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+      BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+      BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+      BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
     steps:
       - name: Set Proxy Docker Tag
         run: |
-          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
-          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable
+          if [ "${{ github.event_name }}" == "release" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
+          elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest
           else
             TAG=${{ env.PROXY_TAG }}
           fi
           echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2.5.0
       - name: Login to Docker Hub
-        uses: docker/login-action@v2.1.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

-      - name: Downloading Proxy Source Code
-        uses: actions/download-artifact@v3
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
         with:
-          name: proxy-src-code
+          driver: ${{ env.BUILDX_DRIVER }}
+          version: ${{ env.BUILDX_VERSION }}
+          endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
       - name: Build and Push Plane-Proxy to Docker Hub
-        uses: docker/build-push-action@v4.0.0
+        uses: docker/build-push-action@v5.1.0
         with:
-          context: .
-          file: ./Dockerfile
+          context: ./nginx
+          file: ./nginx/Dockerfile
-          platforms: linux/amd64
+          platforms: ${{ env.BUILDX_PLATFORMS }}
           tags: ${{ env.PROXY_TAG }}
           push: true
         env:
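
All four build jobs now share the same tag-selection rule, and the diff also swaps the meaning of `latest` and `stable` (releases now get `stable` plus the release tag; `master` pushes get `latest`). A standalone shell sketch of that rule, using `makeplane` as an assumed DOCKERHUB_USERNAME and the frontend image as the example:

    # Sketch of the new tagging rule; "makeplane" is an assumption.
    tag_for() {  # usage: tag_for <event> <branch> [release_tag]
      local event="$1" branch="$2" release="$3"
      if [ "$event" = "release" ]; then
        echo "makeplane/plane-frontend:stable,makeplane/plane-frontend:${release}"
      elif [ "$branch" = "master" ]; then
        echo "makeplane/plane-frontend:latest"
      else
        echo "makeplane/plane-frontend:${branch}"
      fi
    }
    tag_for release master v1.2.3   # makeplane/plane-frontend:stable,makeplane/plane-frontend:v1.2.3
    tag_for push preview            # makeplane/plane-frontend:preview
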

110 .github/workflows/build-test-pull-request.yml
@@ -1,48 +1,104 @@
-name: Build Pull Request Contents
+name: Build and Lint on Pull Request

 on:
+  workflow_dispatch:
   pull_request:
     types: ["opened", "synchronize"]

 jobs:
-  build-pull-request-contents:
-    name: Build Pull Request Contents
-    runs-on: ubuntu-20.04
-    permissions:
-      pull-requests: read
+  get-changed-files:
+    runs-on: ubuntu-latest
+    outputs:
+      apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
+      web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
+      space_changed: ${{ steps.changed-files.outputs.deploy_any_changed }}
     steps:
-      - name: Checkout Repository to Actions
-        uses: actions/checkout@v3.3.0
-
-      - name: Setup Node.js 18.x
-        uses: actions/setup-node@v2
-        with:
-          node-version: 18.x
-          cache: 'yarn'
+      - uses: actions/checkout@v3

       - name: Get changed files
         id: changed-files
-        uses: tj-actions/changed-files@v38
+        uses: tj-actions/changed-files@v41
         with:
           files_yaml: |
             apiserver:
               - apiserver/**
             web:
               - web/**
+              - packages/**
+              - 'package.json'
+              - 'yarn.lock'
+              - 'tsconfig.json'
+              - 'turbo.json'
             deploy:
               - space/**
+              - packages/**
+              - 'package.json'
+              - 'yarn.lock'
+              - 'tsconfig.json'
+              - 'turbo.json'

-      - name: Build Plane's Main App
-        if: steps.changed-files.outputs.web_any_changed == 'true'
-        run: |
-          yarn
-          yarn build --filter=web
+  lint-apiserver:
+    needs: get-changed-files
+    runs-on: ubuntu-latest
+    if: needs.get-changed-files.outputs.apiserver_changed == 'true'
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.x' # Specify the Python version you need
+      - name: Install Pylint
+        run: python -m pip install ruff
+      - name: Install Apiserver Dependencies
+        run: cd apiserver && pip install -r requirements.txt
+      - name: Lint apiserver
+        run: ruff check --fix apiserver

-      - name: Build Plane's Deploy App
-        if: steps.changed-files.outputs.deploy_any_changed == 'true'
-        run: |
-          yarn
-          yarn build --filter=space
+  lint-web:
+    needs: get-changed-files
+    if: needs.get-changed-files.outputs.web_changed == 'true'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Setup Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: 18.x
+      - run: yarn install
+      - run: yarn lint --filter=web
+
+  lint-space:
+    needs: get-changed-files
+    if: needs.get-changed-files.outputs.space_changed == 'true'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Setup Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: 18.x
+      - run: yarn install
+      - run: yarn lint --filter=space
+
+  build-web:
+    needs: lint-web
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Setup Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: 18.x
+      - run: yarn install
+      - run: yarn build --filter=web
+
+  build-space:
+    needs: lint-space
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Setup Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: 18.x
+      - run: yarn install
+      - run: yarn build --filter=space
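
For contributors who want to run the same gates before opening a PR, a rough local equivalent of the lint and build jobs above — a sketch assuming a plane checkout with Node 18, Yarn, and Python available:

    # Mirror of the CI steps, run from the repository root.
    python -m pip install ruff
    (cd apiserver && pip install -r requirements.txt)
    ruff check --fix apiserver                              # lint-apiserver
    yarn install
    yarn lint --filter=web && yarn build --filter=web       # lint-web + build-web
    yarn lint --filter=space && yarn build --filter=space   # lint-space + build-space
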

45 .github/workflows/check-version.yml (new file)
@@ -0,0 +1,45 @@
name: Version Change Before Release

on:
  pull_request:
    branches:
      - master

jobs:
  check-version:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.head_ref }}
          fetch-depth: 0

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'

      - name: Get PR Branch version
        run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV

      - name: Fetch base branch
        run: git fetch origin master:master

      - name: Get Master Branch version
        run: |
          git checkout master
          echo "MASTER_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV

      - name: Get master branch version and compare
        run: |
          echo "Comparing versions: PR version is $PR_VERSION, Master version is $MASTER_VERSION"
          if [ "$PR_VERSION" == "$MASTER_VERSION" ]; then
            echo "Version in PR branch is the same as in master. Failing the CI."
            exit 1
          else
            echo "Version check passed. Versions are different."
          fi
        env:
          PR_VERSION: ${{ env.PR_VERSION }}
          MASTER_VERSION: ${{ env.MASTER_VERSION }}

65 .github/workflows/codeql.yml
@@ -1,13 +1,13 @@
 name: "CodeQL"

 on:
+  workflow_dispatch:
   push:
-    branches: [ 'develop', 'hot-fix', 'stage-release' ]
+    branches: ["develop", "preview", "master"]
   pull_request:
-    # The branches below must be a subset of the branches above
-    branches: [ 'develop' ]
+    branches: ["develop", "preview", "master"]
   schedule:
-    - cron: '53 19 * * 5'
+    - cron: "53 19 * * 5"

 jobs:
   analyze:
@@ -21,45 +21,44 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: [ 'python', 'javascript' ]
+        language: ["python", "javascript"]
         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
         # Use only 'java' to analyze code written in Java, Kotlin or both
         # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
         # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

     steps:
       - name: Checkout repository
         uses: actions/checkout@v3

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
         uses: github/codeql-action/init@v2
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
           # By default, queries listed here will override any specified in a config file.
           # Prefix the list here with "+" to use these queries and those in the config file.

           # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
           # queries: security-extended,security-and-quality

       # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
         uses: github/codeql-action/autobuild@v2

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

       # If the Autobuild fails above, remove it and uncomment the following three lines.
       # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

       # - run: |
       #   echo "Run, Build Application using script"
       #   ./location_of_script_within_repo/buildscript.sh

       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v2
         with:
           category: "/language:${{matrix.language}}"
-
70
.github/workflows/create-sync-pr.yml
vendored
70
.github/workflows/create-sync-pr.yml
vendored
@ -1,42 +1,28 @@
|
|||||||
name: Create PR in Plane EE Repository to sync the changes
|
name: Create Sync Action
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
branches:
|
branches:
|
||||||
- master
|
- preview
|
||||||
types:
|
|
||||||
- closed
|
env:
|
||||||
|
SOURCE_BRANCH_NAME: ${{ github.ref_name }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
create_pr:
```diff
   sync_changes:
-    # Only run the job when a PR is merged
-    if: github.event.pull_request.merged == true
     runs-on: ubuntu-latest
     permissions:
       pull-requests: write
       contents: read
     steps:
-      - name: Check SOURCE_REPO
-        id: check_repo
-        env:
-          SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
-        run: |
-          echo "::set-output name=is_correct_repo::$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)"
-
       - name: Checkout Code
-        if: steps.check_repo.outputs.is_correct_repo == 'true'
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v2
         with:
           persist-credentials: false
           fetch-depth: 0

-      - name: Set up Branch Name
-        if: steps.check_repo.outputs.is_correct_repo == 'true'
-        run: |
-          echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
-
       - name: Setup GH CLI
-        if: steps.check_repo.outputs.is_correct_repo == 'true'
         run: |
           type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
           curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
@@ -45,35 +31,25 @@ jobs:
           sudo apt update
           sudo apt install gh -y

-      - name: Create Pull Request
-        if: steps.check_repo.outputs.is_correct_repo == 'true'
+      - name: Push Changes to Target Repo A
         env:
           GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
         run: |
-          TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
-          TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
+          TARGET_REPO="${{ secrets.TARGET_REPO_A }}"
+          TARGET_BRANCH="${{ secrets.TARGET_REPO_A_BRANCH_NAME }}"
           SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"

           git checkout $SOURCE_BRANCH
-          git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
-          git push target $SOURCE_BRANCH:$SOURCE_BRANCH
-
-          PR_TITLE="${{ github.event.pull_request.title }}"
-          PR_BODY="${{ github.event.pull_request.body }}"
-
-          # Remove double quotes
-          PR_TITLE_CLEANED="${PR_TITLE//\"/}"
-          PR_BODY_CLEANED="${PR_BODY//\"/}"
-
-          # Construct PR_BODY_CONTENT using a here-document
-          PR_BODY_CONTENT=$(cat <<EOF
-          $PR_BODY_CLEANED
-          EOF
-          )
-
-          gh pr create \
-            --base $TARGET_BRANCH \
-            --head $SOURCE_BRANCH \
-            --title "[SYNC] $PR_TITLE_CLEANED" \
-            --body "$PR_BODY_CONTENT" \
-            --repo $TARGET_REPO
+          git remote add target-origin-a "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
+          git push target-origin-a $SOURCE_BRANCH:$TARGET_BRANCH
+
+      - name: Push Changes to Target Repo B
+        env:
+          GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
+        run: |
+          TARGET_REPO="${{ secrets.TARGET_REPO_B }}"
+          TARGET_BRANCH="${{ secrets.TARGET_REPO_B_BRANCH_NAME }}"
+          SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
+
+          git remote add target-origin-b "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
+          git push target-origin-b $SOURCE_BRANCH:$TARGET_BRANCH
```
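A side note on the removed `Check SOURCE_REPO` step: it used the `::set-output` workflow command, which GitHub has deprecated in favor of appending to the `$GITHUB_OUTPUT` file. If similar gating is ever reintroduced, a minimal Python sketch of the non-deprecated equivalent (assuming it runs inside an Actions step with `SOURCE_REPO` exported via the step's `env:` block):

```python
import os

# Assumption: SOURCE_REPO is injected through the step's `env:` block,
# exactly as it was in the removed Check SOURCE_REPO step.
source_repo = os.environ.get("SOURCE_REPO", "")
is_correct = "true" if source_repo == "makeplane/plane" else "false"

# GITHUB_OUTPUT points at a file that GitHub Actions parses for step outputs;
# appending `name=value` lines replaces the deprecated `::set-output` command.
with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
    fh.write(f"is_correct_repo={is_correct}\n")
```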
.github/workflows/feature-deployment.yml (new file, vendored, 199 lines)

```yaml
name: Feature Preview

on:
  workflow_dispatch:
    inputs:
      web-build:
        required: false
        description: 'Build Web'
        type: boolean
        default: true
      space-build:
        required: false
        description: 'Build Space'
        type: boolean
        default: false

env:
  BUILD_WEB: ${{ github.event.inputs.web-build }}
  BUILD_SPACE: ${{ github.event.inputs.space-build }}

jobs:
  setup-feature-build:
    name: Feature Build Setup
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        run: |
          echo "BUILD_WEB=$BUILD_WEB"
          echo "BUILD_SPACE=$BUILD_SPACE"
    outputs:
      web-build: ${{ env.BUILD_WEB }}
      space-build: ${{ env.BUILD_SPACE }}

  feature-build-web:
    if: ${{ needs.setup-feature-build.outputs.web-build == 'true' }}
    needs: setup-feature-build
    name: Feature Build Web
    runs-on: ubuntu-latest
    env:
      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
    steps:
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
      - name: Install AWS cli
        run: |
          sudo apt-get update
          sudo apt-get install -y python3-pip
          pip3 install awscli
      - name: Checkout
        uses: actions/checkout@v4
        with:
          path: plane
      - name: Install Dependencies
        run: |
          cd $GITHUB_WORKSPACE/plane
          yarn install
      - name: Build Web
        id: build-web
        run: |
          cd $GITHUB_WORKSPACE/plane
          yarn build --filter=web
          cd $GITHUB_WORKSPACE

          TAR_NAME="web.tar.gz"
          tar -czf $TAR_NAME ./plane

          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{ github.sha }}/$TAR_NAME --expires $FILE_EXPIRY

  feature-build-space:
    if: ${{ needs.setup-feature-build.outputs.space-build == 'true' }}
    needs: setup-feature-build
    name: Feature Build Space
    runs-on: ubuntu-latest
    env:
      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
      NEXT_PUBLIC_DEPLOY_WITH_NGINX: 1
      NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
    outputs:
      do-build: ${{ needs.setup-feature-build.outputs.space-build }}
      s3-url: ${{ steps.build-space.outputs.S3_PRESIGNED_URL }}
    steps:
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '18'
      - name: Install AWS cli
        run: |
          sudo apt-get update
          sudo apt-get install -y python3-pip
          pip3 install awscli
      - name: Checkout
        uses: actions/checkout@v4
        with:
          path: plane
      - name: Install Dependencies
        run: |
          cd $GITHUB_WORKSPACE/plane
          yarn install
      - name: Build Space
        id: build-space
        run: |
          cd $GITHUB_WORKSPACE/plane
          yarn build --filter=space
          cd $GITHUB_WORKSPACE

          TAR_NAME="space.tar.gz"
          tar -czf $TAR_NAME ./plane

          FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
          aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{ github.sha }}/$TAR_NAME --expires $FILE_EXPIRY

  feature-deploy:
    if: ${{ always() && (needs.setup-feature-build.outputs.web-build == 'true' || needs.setup-feature-build.outputs.space-build == 'true') }}
    needs: [feature-build-web, feature-build-space]
    name: Feature Deploy
    runs-on: ubuntu-latest
    env:
      AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
      AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
      KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
    steps:
      - name: Install AWS cli
        run: |
          sudo apt-get update
          sudo apt-get install -y python3-pip
          pip3 install awscli
      - name: Tailscale
        uses: tailscale/github-action@v2
        with:
          oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
          oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
          tags: tag:ci
      - name: Kubectl Setup
        run: |
          curl -LO "https://dl.k8s.io/release/${{ vars.FEATURE_PREVIEW_KUBE_VERSION }}/bin/linux/amd64/kubectl"
          chmod +x kubectl

          mkdir -p ~/.kube
          echo "$KUBE_CONFIG_FILE" > ~/.kube/config
          chmod 600 ~/.kube/config
      - name: HELM Setup
        run: |
          curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
          chmod 700 get_helm.sh
          ./get_helm.sh
      - name: App Deploy
        run: |
          WEB_S3_URL=""
          if [ ${{ env.BUILD_WEB }} == true ]; then
            WEB_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{ github.sha }}/web.tar.gz --expires-in 3600)
          fi

          SPACE_S3_URL=""
          if [ ${{ env.BUILD_SPACE }} == true ]; then
            SPACE_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{ github.sha }}/space.tar.gz --expires-in 3600)
          fi

          if [ ${{ env.BUILD_WEB }} == true ] || [ ${{ env.BUILD_SPACE }} == true ]; then

            helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}

            APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
            DEPLOY_SCRIPT_URL="${{ vars.FEATURE_PREVIEW_DEPLOY_SCRIPT_URL }}"

            METADATA=$(helm --kube-insecure-skip-tls-verify install feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} \
              --generate-name \
              --namespace $APP_NAMESPACE \
              --set ingress.primaryDomain=${{ vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
              --set web.image=${{ vars.FEATURE_PREVIEW_DOCKER_BASE }} \
              --set web.enabled=${{ env.BUILD_WEB || false }} \
              --set web.artifact_url=$WEB_S3_URL \
              --set space.image=${{ vars.FEATURE_PREVIEW_DOCKER_BASE }} \
              --set space.enabled=${{ env.BUILD_SPACE || false }} \
              --set space.artifact_url=$SPACE_S3_URL \
              --set shared_config.deploy_script_url=$DEPLOY_SCRIPT_URL \
              --set shared_config.api_base_url=${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }} \
              --output json \
              --timeout 1000s)

            APP_NAME=$(echo $METADATA | jq -r '.name')

            INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \
              -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
              jq -r '.spec.rules[0].host')

            echo "****************************************"
            echo "APP NAME ::: $APP_NAME"
            echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
            echo "****************************************"
          fi
```
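The build jobs upload the tarballs with a two-day `--expires` timestamp, and the deploy job hands the Helm chart short-lived download links via `aws s3 presign`. The same presigned URL can be produced from Python; a minimal sketch assuming `boto3` is available and the same `FEATURE_PREVIEW_*` credentials are exported (bucket and key names below are placeholders):

```python
import boto3

# Assumes AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY are in the environment,
# as they are in the feature-deploy job's `env:` block.
s3 = boto3.client("s3")

# Mirrors: aws s3 presign s3://$BUCKET/$SHA/web.tar.gz --expires-in 3600
url = s3.generate_presigned_url(
    "get_object",
    Params={"Bucket": "my-preview-bucket", "Key": "abc123/web.tar.gz"},  # hypothetical
    ExpiresIn=3600,  # one hour, matching the workflow
)
print(url)
```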
.gitignore (vendored, 5 lines changed)

```diff
@@ -1,3 +1,7 @@
+pg_data
+redis_data
+minio_data
+
 node_modules
 .next
@@ -51,6 +55,7 @@ staticfiles
 mediafiles
 .env
 .DS_Store
+logs/

 node_modules/
 assets/dist/
```
````diff
@@ -33,8 +33,8 @@ The backend is a django project which is kept inside apiserver
 1. Clone the repo

 ```bash
-git clone https://github.com/makeplane/plane
-cd plane
+git clone https://github.com/makeplane/plane.git [folder-name]
+cd [folder-name]
 chmod +x setup.sh
 ```

@@ -44,32 +44,10 @@ chmod +x setup.sh
 ./setup.sh
 ```

-3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in **web/.env** and **space/.env** file
-
-```bash
-echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
-```
-
-```bash
-echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
-```
-
-4. Run Docker compose up
-
-```bash
-docker compose up -d
-```
-
-5. Install dependencies
-
-```bash
-yarn install
-```
-
-6. Run the web app in development mode
-
-```bash
-yarn dev
-```
+3. Start the containers
+
+```bash
+docker compose -f docker-compose-local.yml up
+```

 ## Missing a Feature?
````
Dockerfile (207 lines changed)

The root Dockerfile is rewritten wholesale: the upstream node:18-alpine multi-stage build is replaced by an all-in-one image based on git.orionkindel.com/tpl/asdf:bookworm that runs Node, Python, Postgres, Redis, MinIO, and nginx under s6-overlay.

Old:

```dockerfile
FROM node:18-alpine AS builder
RUN apk add --no-cache libc6-compat
# Set working directory
WORKDIR /app
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER

RUN yarn global add turbo
RUN apk add tree
COPY . .

RUN turbo prune --scope=app --scope=plane-deploy --docker
CMD tree -I node_modules/

# Add lockfile and package.json's of isolated subworkspace
FROM node:18-alpine AS installer

RUN apk add --no-cache libc6-compat
WORKDIR /app
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
# First install the dependencies (as they change less often)
COPY .gitignore .gitignore
COPY --from=builder /app/out/json/ .
COPY --from=builder /app/out/yarn.lock ./yarn.lock
RUN yarn install

# # Build the project
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
COPY replace-env-vars.sh /usr/local/bin/
USER root
RUN chmod +x /usr/local/bin/replace-env-vars.sh

RUN yarn turbo run build

ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL

RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}

FROM python:3.11.1-alpine3.17 AS backend

# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1

WORKDIR /code

RUN apk --no-cache add \
    "libpq~=15" \
    "libxslt~=1.1" \
    "nodejs-current~=19" \
    "xmlsec~=1.2" \
    "nginx" \
    "nodejs" \
    "npm" \
    "supervisor"

COPY apiserver/requirements.txt ./
COPY apiserver/requirements ./requirements
RUN apk add --no-cache libffi-dev
RUN apk add --no-cache --virtual .build-deps \
    "bash~=5.2" \
    "g++~=12.2" \
    "gcc~=12.2" \
    "cargo~=1.64" \
    "git~=2" \
    "make~=4.3" \
    "postgresql13-dev~=13" \
    "libc-dev" \
    "linux-headers" \
    && \
    pip install -r requirements.txt --compile --no-cache-dir \
    && \
    apk del .build-deps

# Add in Django deps and generate Django's static files
COPY apiserver/manage.py manage.py
COPY apiserver/plane plane/
COPY apiserver/templates templates/

RUN apk --no-cache add "bash~=5.2"
COPY apiserver/bin ./bin/

RUN chmod +x ./bin/takeoff ./bin/worker
RUN chmod -R 777 /code

# Expose container port and run entry point script

WORKDIR /app

# Don't run production as root
RUN addgroup --system --gid 1001 plane
RUN adduser --system --uid 1001 captain

COPY --from=installer /app/apps/app/next.config.js .
COPY --from=installer /app/apps/app/package.json .
COPY --from=installer /app/apps/space/next.config.js .
COPY --from=installer /app/apps/space/package.json .

COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next

ENV NEXT_TELEMETRY_DISABLED 1

# RUN rm /etc/nginx/conf.d/default.conf
#######################################################################
COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf
#######################################################################

COPY nginx/supervisor.conf /code/supervisor.conf

ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
    BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL

USER root
COPY replace-env-vars.sh /usr/local/bin/
COPY start.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/replace-env-vars.sh
RUN chmod +x /usr/local/bin/start.sh

EXPOSE 80

CMD ["supervisord","-c","/code/supervisor.conf"]
```

New:

```dockerfile
FROM git.orionkindel.com/tpl/asdf:bookworm AS system

ARG S6_OVERLAY_VERSION=3.1.6.2

ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz /tmp
RUN tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz

ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-x86_64.tar.xz /tmp
RUN tar -C / -Jxpf /tmp/s6-overlay-x86_64.tar.xz

RUN apt-get update
RUN apt-get install -y \
    build-essential \
    zlib1g-dev \
    libncurses5-dev \
    libgdbm-dev \
    libnss3-dev \
    libssl-dev \
    libreadline-dev \
    libffi-dev \
    libsqlite3-dev \
    wget \
    libbz2-dev \
    uuid-dev \
    nginx \
    procps

RUN asdf plugin add nodejs \
    && asdf plugin add python \
    && asdf plugin add postgres

RUN --mount=type=cache,target=/.asdf-build \
    export ASDF_DOWNLOAD_PATH=/.asdf-build \
    && export TMPDIR=/.asdf-build \
    && export POSTGRES_SKIP_INITDB=y \
    && asdf install nodejs 20.9.0 \
    && asdf install python 3.11.1 \
    && asdf install postgres 15.3

RUN asdf global nodejs 20.9.0 \
    && asdf global postgres 15.3 \
    && asdf global python 3.11.1

RUN useradd -m postgres && passwd -d postgres

ADD https://dl.min.io/server/minio/release/linux-amd64/minio /usr/bin
RUN chmod +x /usr/bin/minio

RUN set -eo pipefail; \
    curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg; \
    echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb bookworm main" | tee /etc/apt/sources.list.d/redis.list; \
    apt-get update; \
    apt-get install -y redis

FROM system AS next_prebuild

RUN npm i -g yarn
RUN --mount=type=cache,target=/.yarn-cache \
    yarn config set cache-folder /.yarn-cache

COPY package.json turbo.json yarn.lock app.json ./
COPY packages packages
COPY web web
COPY space space

RUN --mount=type=cache,target=/.yarn-cache \
    yarn install

FROM next_prebuild AS next_build

RUN --mount=type=cache,target=/.yarn-cache \
    --mount=type=cache,target=/web/.next \
    --mount=type=cache,target=/space/.next \
    yarn build && \
    cp -R /web/.next /web/_next && \
    cp -R /space/.next /space/_next

RUN mv /web/_next /web/.next && \
    mv /space/_next /space/.next && \
    cp -R /web/.next/standalone/web/* /web/ && \
    cp -R /space/.next/standalone/space/* /space/

FROM next_build AS api_build
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1

COPY apiserver apiserver
RUN --mount=type=cache,target=/root/.cache/pip \
    cd /apiserver \
    && pip install -r requirements.txt --compile

FROM api_build AS s6

COPY docker/etc/ /etc/

RUN chmod -R 777 /root \
    && chmod -R 777 /root/.asdf \
    && chmod -x /root/.asdf/lib/commands/* \
    && chmod -R 777 /apiserver \
    && chmod -R 777 /web \
    && chmod -R 777 /space \
    && ln $(asdf which postgres) /usr/bin/postgres \
    && ln $(asdf which initdb) /usr/bin/initdb \
    && ln $(asdf which node) /usr/bin/node \
    && ln $(asdf which npm) /usr/bin/npm \
    && ln $(asdf which python) /usr/bin/python

ENV S6_KEEP_ENV=1
ENTRYPOINT ["/init"]
```
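The retired build baked `http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER` into the compiled Next.js bundles and patched it at container start with `replace-env-vars.sh`. That script is not shown in this diff, so the following is only a hypothetical Python sketch of that bake-then-substitute pattern; the scanned directory and file glob are assumptions:

```python
from pathlib import Path

# Placeholder string from the old Dockerfile's ENV line.
PLACEHOLDER = "http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER"

def replace_env_vars(root: str, placeholder: str, value: str) -> int:
    """Swap a build-time placeholder for the runtime value in built JS chunks."""
    patched = 0
    for path in Path(root).rglob("*.js"):  # assumed location of compiled chunks
        text = path.read_text(encoding="utf-8", errors="ignore")
        if placeholder in text:
            path.write_text(text.replace(placeholder, value), encoding="utf-8")
            patched += 1
    return patched

if __name__ == "__main__":
    # Example invocation mirroring the old entrypoint's substitution step.
    print(replace_env_vars("/app/apps", PLACEHOLDER, "http://localhost:8000"))
```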
ENV_SETUP.md (15 lines changed)

````diff
@@ -49,25 +49,10 @@ NGINX_PORT=80

 ```
-# Enable/Disable OAUTH - default 0 for selfhosted instance
-NEXT_PUBLIC_ENABLE_OAUTH=0
 # Public boards deploy URL
 NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
 ```

-## {PROJECT_FOLDER}/spaces/.env.example
-
-```
-# Flag to toggle OAuth
-NEXT_PUBLIC_ENABLE_OAUTH=0
-```
-
 ## {PROJECT_FOLDER}/apiserver/.env
````
README.md (155 lines changed)

````diff
@@ -7,7 +7,7 @@
 </p>
 <h3 align="center"><b>Plane</b></h3>
-<p align="center"><b>Flexible, extensible open-source project management</b></p>
+<p align="center"><b>Open-source project management that unlocks customer value.</b></p>

 <p align="center">
 <a href="https://discord.com/invite/A92xrEGCge">
@@ -16,6 +16,13 @@
 <img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
 </p>
+
+<p align="center">
+    <a href="https://dub.sh/plane-website-readme"><b>Website</b></a> •
+    <a href="https://git.new/releases"><b>Releases</b></a> •
+    <a href="https://dub.sh/planepowershq"><b>Twitter</b></a> •
+    <a href="https://dub.sh/planedocs"><b>Documentation</b></a>
+</p>

 <p>
 <a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
 <img
@@ -33,56 +40,90 @@
 </a>
 </p>

-Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘‍♀️.
+Meet [Plane](https://dub.sh/plane-website-readme). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘‍♀️

-> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
+> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve in our upcoming releases.

-The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
+## ⚡ Installation

-## ⚡️ Contributors Quick Start
+The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account where we offer a hosted solution for users.

-### Prerequisite
+If you want more control over your data, prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).

-Development system must have docker engine installed and running.
+| Installation Methods | Documentation Link |
+| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| Docker               | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/self-hosting/methods/docker-compose) |
+| Kubernetes           | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://docs.plane.so/kubernetes) |

-### Steps
+`Instance admin` can configure instance settings using our [God-mode](https://docs.plane.so/instance-admin) feature.

-Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute
-
-1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
-1. Switch to the code folder `cd plane`
-1. Create your feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
-1. Open terminal and run `./setup.sh`
-1. Open the code on VSCode or similar equivalent IDE
-1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
-1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
-
-You are ready to make changes to the code. Do not forget to refresh the browser (in case id does not auto-reload)
-
-Thats it!
-
-## 🍙 Self Hosting
-
-For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page
-
 ## 🚀 Features

-- **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
-- **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
-- **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
-- **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
-- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
+- **Issues**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
+
+- **Cycles**:
+  Keep up your team's momentum with Cycles. Gain insights into your project's progress with burn-down charts and other valuable features.
+
+- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to track and plan your project's progress easily.
+
 - **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
-- **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
-- **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
-- **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
+
+- **Pages**: Plane pages, equipped with AI and a rich text editor, let you jot down your thoughts on the fly. Format your text, upload images, hyperlink, or sync your existing ideas into an actionable item or issue.
+
+- **Analytics**: Get insights into all your Plane data in real-time. Visualize issue data to spot trends, remove blockers, and progress your work.
+
+- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
+
+## 🛠️ Quick start for contributors
+
+> Development system must have docker engine installed and running.
+
+Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute -
+
+1. Clone the code locally using:
+   ```
+   git clone https://github.com/makeplane/plane.git
+   ```
+2. Switch to the code folder:
+   ```
+   cd plane
+   ```
+3. Create your feature or fix branch you plan to work on using:
+   ```
+   git checkout -b <feature-branch-name>
+   ```
+4. Open terminal and run:
+   ```
+   ./setup.sh
+   ```
+5. Open the code on VSCode or similar equivalent IDE.
+6. Review the `.env` files available in various folders.
+   Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system.
+7. Run the docker command to initiate services:
+   ```
+   docker compose -f docker-compose-local.yml up -d
+   ```
+
+You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
+
+Thats it!
+
+## ❤️ Community
+
+The Plane community can be found on [GitHub Discussions](https://github.com/orgs/makeplane/discussions), and our [Discord server](https://discord.com/invite/A92xrEGCge). Our [Code of conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community chanels.
+
+Ask questions, report bugs, join discussions, voice ideas, make feature requests, or share your projects.
+
+### Repo Activity
+
+![Plane Repo Activity](https://repobeats.axiom.co/api/embed/2523c6ed2f77c082b7908c33e2ab208981d76c39.svg "Repobeats analytics image")
+
 ## 📸 Screenshots

 <p>
   <a href="https://plane.so" target="_blank">
     <img
-      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_views_dark_mode.webp"
+      src="https://ik.imagekit.io/w2okwbtu2/Issues_rNZjrGgFl.png?updatedAt=1709298765880"
       alt="Plane Views"
       width="100%"
     />
@@ -91,8 +132,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
 <p>
   <a href="https://plane.so" target="_blank">
     <img
-      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_issue_detail_dark_mode.webp"
-      alt="Plane Issue Details"
+      src="https://ik.imagekit.io/w2okwbtu2/Cycles_jCDhqmTl9.png?updatedAt=1709298780697"
       width="100%"
     />
   </a>
@@ -100,7 +140,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
 <p>
   <a href="https://plane.so" target="_blank">
     <img
-      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_cycles_modules_dark_mode.webp"
+      src="https://ik.imagekit.io/w2okwbtu2/Modules_PSCVsbSfI.png?updatedAt=1709298796783"
       alt="Plane Cycles and Modules"
       width="100%"
     />
@@ -109,7 +149,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
 <p>
   <a href="https://plane.so" target="_blank">
     <img
-      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_analytics_dark_mode.webp"
+      src="https://ik.imagekit.io/w2okwbtu2/Views_uxXsRatS4.png?updatedAt=1709298834522"
      alt="Plane Analytics"
      width="100%"
     />
@@ -118,7 +158,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
 <p>
   <a href="https://plane.so" target="_blank">
     <img
-      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_pages_dark_mode.webp"
+      src="https://ik.imagekit.io/w2okwbtu2/Analytics_0o22gLRtp.png?updatedAt=1709298834389"
      alt="Plane Pages"
      width="100%"
     />
@@ -128,7 +168,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
 <p>
   <a href="https://plane.so" target="_blank">
     <img
-      src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_commad_k_dark_mode.webp"
+      src="https://ik.imagekit.io/w2okwbtu2/Drive_LlfeY4xn3.png?updatedAt=1709298837917"
      alt="Plane Command Menu"
      width="100%"
     />
@@ -136,20 +176,23 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
 </p>
 </p>

-## 📚Documentation
-
-For full documentation, visit [docs.plane.so](https://docs.plane.so/)
-
-To see how to Contribute, visit [here](https://github.com/makeplane/plane/blob/master/CONTRIBUTING.md).
-
-## ❤️ Community
-
-The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects.
-
-To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge).
-
-Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.
-
 ## ⛓️ Security

-If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email engineering@plane.so to disclose any security vulnerabilities.
+If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.
+
+Email squawk@plane.so to disclose any security vulnerabilities.
+
+## ❤️ Contribute
+
+There are many ways to contribute to Plane, including:
+
+- Submitting [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) and [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+) for various components.
+- Reviewing [the documentation](https://docs.plane.so/) and submitting [pull requests](https://github.com/makeplane/plane), from fixing typos to adding new features.
+- Speaking or writing about Plane or any other ecosystem integration and [letting us know](https://discord.com/invite/A92xrEGCge)!
+- Upvoting [popular feature requests](https://github.com/makeplane/plane/issues) to show your support.
+
+### We couldn't have done this without you.
+
+<a href="https://github.com/makeplane/plane/graphs/contributors">
+  <img src="https://contrib.rocks/image?repo=makeplane/plane" />
+</a>
````
SECURITY.md (new file, 44 lines)

```markdown
# Security Policy

This document outlines security procedures and vulnerability reporting for the Plane project.

At Plane, we treat safeguarding the security of our systems as a top priority. Despite our efforts, vulnerabilities may still exist. We greatly appreciate your assistance in identifying and reporting any such vulnerabilities to help us maintain the integrity of our systems and protect our clients.

To report a security vulnerability, please email us directly at security@plane.so with a detailed description of the vulnerability and steps to reproduce it. Please refrain from disclosing the vulnerability publicly until we have had an opportunity to review and address it.

## Out of Scope Vulnerabilities

We appreciate your help in identifying vulnerabilities. However, please note that the following types of vulnerabilities are considered out of scope:

- Attacks requiring MITM or physical access to a user's device.
- Content spoofing and text injection issues without demonstrating an attack vector or ability to modify HTML/CSS.
- Email spoofing.
- Missing DNSSEC, CAA, CSP headers.
- Lack of Secure or HTTP only flag on non-sensitive cookies.

## Reporting Process

If you discover a vulnerability, please adhere to the following reporting process:

1. Email your findings to security@plane.so.
2. Refrain from running automated scanners on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
3. Do not exploit the vulnerability for malicious purposes, such as downloading excessive data or altering user data.
4. Maintain confidentiality and refrain from disclosing the vulnerability until it has been resolved.
5. Avoid using physical security attacks, social engineering, distributed denial of service, spam, or third-party applications.

When reporting a vulnerability, please provide sufficient information to allow us to reproduce and address the issue promptly. Include the IP address or URL of the affected system, along with a detailed description of the vulnerability.

## Our Commitment

We are committed to promptly addressing reported vulnerabilities and maintaining open communication throughout the resolution process. Here's what you can expect from us:

- **Response Time:** We will acknowledge receipt of your report within three business days and provide an expected resolution date.
- **Legal Protection:** We will not pursue legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
- **Confidentiality:** Your report will be treated with strict confidentiality. We will not disclose your personal information to third parties without your consent.
- **Progress Updates:** We will keep you informed of our progress in resolving the reported vulnerability.
- **Recognition:** With your permission, we will publicly acknowledge you as the discoverer of the vulnerability.
- **Timely Resolution:** We strive to resolve all reported vulnerabilities promptly and will actively participate in the publication process once the issue is resolved.

We appreciate your cooperation in helping us maintain the security of our systems and protecting our clients. Thank you for your contributions to our security efforts.

reference: https://supabase.com/.well-known/security.txt
```
```diff
@@ -8,16 +8,12 @@ SENTRY_DSN=""
 SENTRY_ENVIRONMENT="development"

 # Database Settings
-PGUSER="plane"
-PGPASSWORD="plane"
-PGHOST="plane-db"
-PGDATABASE="plane"
-DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
-
-# Oauth variables
-GOOGLE_CLIENT_ID=""
-GITHUB_CLIENT_ID=""
-GITHUB_CLIENT_SECRET=""
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_HOST="plane-db"
+POSTGRES_DB="plane"
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}/${POSTGRES_DB}

 # Redis Settings
 REDIS_HOST="plane-redis"
@@ -34,14 +30,6 @@ AWS_S3_BUCKET_NAME="uploads"
 # Maximum file upload limit
 FILE_SIZE_LIMIT=5242880
-
-# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
-OPENAI_API_KEY="sk-" # deprecated
-GPT_ENGINE="gpt-3.5-turbo" # deprecated
-
-# Github
-GITHUB_CLIENT_SECRET="" # For fetching release notes

 # Settings related to Docker
 DOCKERIZED=1 # deprecated
@@ -51,19 +39,8 @@ USE_MINIO=1
 # Nginx Configuration
 NGINX_PORT=80
-
-# SignUps
-ENABLE_SIGNUP="1"
-
-# Enable Email/Password Signup
-ENABLE_EMAIL_PASSWORD="1"
-
-# Enable Magic link Login
-ENABLE_MAGIC_LINK_LOGIN="0"

 # Email redirections and minio domain settings
 WEB_URL="http://localhost"

 # Gunicorn Workers
 GUNICORN_WORKERS=2
```
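Since `DATABASE_URL` is now interpolated from the renamed `POSTGRES_*` variables, a small Python sketch (a hypothetical helper, not part of the repo) that mirrors the composition and is handy for sanity-checking an `.env` file:

```python
import os

# Defaults mirror the example .env above; real values come from the environment.
user = os.environ.get("POSTGRES_USER", "plane")
password = os.environ.get("POSTGRES_PASSWORD", "plane")
host = os.environ.get("POSTGRES_HOST", "plane-db")
db = os.environ.get("POSTGRES_DB", "plane")

# Same shape as DATABASE_URL=postgresql://${POSTGRES_USER}:...
database_url = f"postgresql://{user}:{password}@{host}/{db}"
print(database_url)  # -> postgresql://plane:plane@plane-db/plane
```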
```diff
@@ -32,27 +32,19 @@ RUN apk add --no-cache --virtual .build-deps \
     apk del .build-deps

-RUN addgroup -S plane && \
-    adduser -S captain -G plane
-
-RUN chown captain.plane /code
-
-USER captain
-
 # Add in Django deps and generate Django's static files
 COPY manage.py manage.py
 COPY plane plane/
 COPY templates templates/
 COPY package.json package.json
-USER root
 RUN apk --no-cache add "bash~=5.2"
 COPY ./bin ./bin/

+RUN mkdir -p /code/plane/logs
 RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
 RUN chmod -R 777 /code

-USER captain
-
 # Expose container port and run entry point script
 EXPOSE 8000
```
```diff
@@ -27,26 +27,19 @@ WORKDIR /code
 COPY requirements.txt ./requirements.txt
 ADD requirements ./requirements

-RUN pip install -r requirements.txt --compile --no-cache-dir
-
-RUN addgroup -S plane && \
-    adduser -S captain -G plane
-
-RUN chown captain.plane /code
-
-USER captain
-
-# Add in Django deps and generate Django's static files
-
-USER root
-
-# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
+# Install the local development settings
+RUN pip install -r requirements/local.txt --compile --no-cache-dir
+
+COPY . .
+
+RUN mkdir -p /code/plane/logs
+RUN chmod -R +x /code/bin
+
 RUN chmod -R 777 /code

-USER captain
-
 # Expose container port and run entry point script
 EXPOSE 8000

-# CMD [ "./bin/takeoff" ]
+CMD [ "./bin/takeoff.local" ]
```
```diff
@@ -26,7 +26,9 @@ def update_description():
             updated_issues.append(issue)

         Issue.objects.bulk_update(
-            updated_issues, ["description_html", "description_stripped"], batch_size=100
+            updated_issues,
+            ["description_html", "description_stripped"],
+            batch_size=100,
         )
         print("Success")
     except Exception as e:
@@ -40,7 +42,9 @@ def update_comments():
         updated_issue_comments = []

         for issue_comment in issue_comments:
-            issue_comment.comment_html = f"<p>{issue_comment.comment_stripped}</p>"
+            issue_comment.comment_html = (
+                f"<p>{issue_comment.comment_stripped}</p>"
+            )
             updated_issue_comments.append(issue_comment)

         IssueComment.objects.bulk_update(
@@ -99,7 +103,9 @@ def updated_issue_sort_order():
             issue.sort_order = issue.sequence_id * random.randint(100, 500)
             updated_issues.append(issue)

-        Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100)
+        Issue.objects.bulk_update(
+            updated_issues, ["sort_order"], batch_size=100
+        )
         print("Success")
     except Exception as e:
         print(e)
@@ -137,7 +143,9 @@ def update_project_cover_images():
             project.cover_image = project_cover_images[random.randint(0, 19)]
             updated_projects.append(project)

-        Project.objects.bulk_update(updated_projects, ["cover_image"], batch_size=100)
+        Project.objects.bulk_update(
+            updated_projects, ["cover_image"], batch_size=100
+        )
         print("Success")
     except Exception as e:
         print(e)
@@ -174,7 +182,7 @@ def update_label_color():
         labels = Label.objects.filter(color="")
         updated_labels = []
         for label in labels:
-            label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
+            label.color = f"#{random.randint(0, 0xFFFFFF+1):06X}"
             updated_labels.append(label)

         Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
@@ -186,7 +194,9 @@ def update_label_color():

 def create_slack_integration():
     try:
-        _ = Integration.objects.create(provider="slack", network=2, title="Slack")
+        _ = Integration.objects.create(
+            provider="slack", network=2, title="Slack"
+        )
         print("Success")
     except Exception as e:
         print(e)
@@ -212,12 +222,16 @@ def update_integration_verified():

 def update_start_date():
     try:
-        issues = Issue.objects.filter(state__group__in=["started", "completed"])
+        issues = Issue.objects.filter(
+            state__group__in=["started", "completed"]
+        )
         updated_issues = []
         for issue in issues:
             issue.start_date = issue.created_at.date()
             updated_issues.append(issue)
-        Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500)
+        Issue.objects.bulk_update(
+            updated_issues, ["start_date"], batch_size=500
+        )
         print("Success")
     except Exception as e:
         print(e)
```
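One caveat in the new `update_label_color` line: `random.randint` is inclusive at both ends, so `randint(0, 0xFFFFFF+1)` can return `0x1000000`, which formats as a seven-digit color. A short sketch of the off-by-one and two bounds-safe alternatives:

```python
import random

# random.randint(a, b) includes b, so 0x1000000 is a possible draw here:
sample = [random.randint(0, 0xFFFFFF + 1) for _ in range(10)]
assert max(sample) <= 0x1000000

# If 0x1000000 is drawn, the format spec overflows six digits:
print(f"#{0x1000000:06X}")  # -> "#1000000", not a valid 6-digit CSS color

# Bounds-safe alternatives:
color_a = f"#{random.randint(0, 0xFFFFFF):06X}"   # inclusive upper bound
color_b = f"#{random.randrange(0x1000000):06X}"   # exclusive upper bound
print(color_a, color_b)
```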
apiserver/bin/beat (3 lines changed, normal file → executable file)

```diff
@@ -2,4 +2,7 @@
 set -e

 python manage.py wait_for_db
+# Wait for migrations
+python manage.py wait_for_migrations
+# Run the processes
 celery -A plane beat -l info
```
```diff
@@ -1,7 +1,8 @@
 #!/bin/bash
 set -e
 python manage.py wait_for_db
-python manage.py migrate
+# Wait for migrations
+python manage.py wait_for_migrations

 # Create the default bucket
 #!/bin/bash
@@ -20,11 +21,15 @@ SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256
 export MACHINE_SIGNATURE=$SIGNATURE

 # Register instance
-python manage.py register_instance $MACHINE_SIGNATURE
+python manage.py register_instance "$MACHINE_SIGNATURE"

 # Load the configuration variable
 python manage.py configure_instance

 # Create the default bucket
 python manage.py create_bucket

-exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:${PORT:-8000} --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
+# Clear Cache before starting to remove stale values
+python manage.py clear_cache
+
+exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
```
|
35
apiserver/bin/takeoff.local
Executable file
35
apiserver/bin/takeoff.local
Executable file
@ -0,0 +1,35 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
python manage.py wait_for_db
|
||||||
|
# Wait for migrations
|
||||||
|
python manage.py wait_for_migrations
|
||||||
|
|
||||||
|
# Create the default bucket
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Collect system information
|
||||||
|
HOSTNAME=$(hostname)
|
||||||
|
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
|
||||||
|
CPU_INFO=$(cat /proc/cpuinfo)
|
||||||
|
MEMORY_INFO=$(free -h)
|
||||||
|
DISK_INFO=$(df -h)
|
||||||
|
|
||||||
|
# Concatenate information and compute SHA-256 hash
|
||||||
|
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
|
||||||
|
|
||||||
|
# Export the variables
|
||||||
|
export MACHINE_SIGNATURE=$SIGNATURE
|
||||||
|
|
||||||
|
# Register instance
|
||||||
|
python manage.py register_instance "$MACHINE_SIGNATURE"
|
||||||
|
# Load the configuration variable
|
||||||
|
python manage.py configure_instance
|
||||||
|
|
||||||
|
# Create the default bucket
|
||||||
|
python manage.py create_bucket
|
||||||
|
|
||||||
|
# Clear Cache before starting to remove stale values
|
||||||
|
python manage.py clear_cache
|
||||||
|
|
||||||
|
python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local
|
||||||
|
|
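The machine signature is just a SHA-256 over concatenated host facts. An equivalent Python sketch (a hypothetical helper mirroring the shell pipeline, including the trailing newline that `echo` appends before `sha256sum`):

```python
import hashlib

def machine_signature(hostname: str, mac: str, cpu: str, mem: str, disk: str) -> str:
    # `echo "$A$B..."` emits a trailing newline, so include it to match sha256sum.
    blob = f"{hostname}{mac}{cpu}{mem}{disk}\n".encode()
    return hashlib.sha256(blob).hexdigest()

# Illustrative inputs only; on a real host these come from hostname/ip/free/df.
print(machine_signature("plane-host", "02:42:ac:11:00:02", "cpu...", "mem...", "disk..."))
```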
```diff
@@ -2,4 +2,7 @@
 set -e

 python manage.py wait_for_db
+# Wait for migrations
+python manage.py wait_for_migrations
+# Run the processes
 celery -A plane worker -l info
```
```diff
@@ -2,10 +2,10 @@
 import os
 import sys

-if __name__ == '__main__':
+if __name__ == "__main__":
     os.environ.setdefault(
-        'DJANGO_SETTINGS_MODULE',
-        'plane.settings.production')
+        "DJANGO_SETTINGS_MODULE", "plane.settings.production"
+    )
     try:
         from django.core.management import execute_from_command_line
     except ImportError as exc:
```
```diff
@@ -1,4 +1,4 @@
 {
   "name": "plane-api",
-  "version": "0.13.2"
+  "version": "0.17.0"
 }
```
```diff
@@ -1,3 +1,3 @@
 from .celery import app as celery_app

-__all__ = ('celery_app',)
+__all__ = ("celery_app",)
```
```diff
@@ -2,4 +2,4 @@ from django.apps import AppConfig


 class AnalyticsConfig(AppConfig):
-    name = 'plane.analytics'
+    name = "plane.analytics"
```
```diff
@@ -25,7 +25,10 @@ class APIKeyAuthentication(authentication.BaseAuthentication):
     def validate_api_token(self, token):
         try:
             api_token = APIToken.objects.get(
-                Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+                Q(
+                    Q(expired_at__gt=timezone.now())
+                    | Q(expired_at__isnull=True)
+                ),
                 token=token,
                 is_active=True,
             )
```
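The reformatted `Q` expression is semantically unchanged: a token is accepted when it either has no expiry or expires in the future, and is active. A minimal standalone sketch of the same predicate in plain Python, for readers less fluent in the Django ORM (function and argument names are illustrative):

```python
from datetime import datetime, timezone

def token_is_valid(expired_at, is_active: bool) -> bool:
    # Mirrors Q(expired_at__gt=now) | Q(expired_at__isnull=True), plus is_active=True.
    now = datetime.now(timezone.utc)
    not_expired = expired_at is None or expired_at > now
    return not_expired and is_active

print(token_is_valid(None, True))   # True: no expiry set, token active
print(token_is_valid(None, False))  # False: inactive tokens always fail
```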
```diff
@@ -1,17 +1,18 @@
 from rest_framework.throttling import SimpleRateThrottle


 class ApiKeyRateThrottle(SimpleRateThrottle):
-    scope = 'api_key'
-    rate = '60/minute'
+    scope = "api_key"
+    rate = "60/minute"

     def get_cache_key(self, request, view):
         # Retrieve the API key from the request header
-        api_key = request.headers.get('X-Api-Key')
+        api_key = request.headers.get("X-Api-Key")
         if not api_key:
             return None  # Allow the request if there's no API key

         # Use the API key as part of the cache key
-        return f'{self.scope}:{api_key}'
+        return f"{self.scope}:{api_key}"

     def allow_request(self, request, view):
         allowed = super().allow_request(request, view)
@@ -35,7 +36,7 @@ class ApiKeyRateThrottle(SimpleRateThrottle):
         reset_time = int(now + self.duration)

         # Add headers
-        request.META['X-RateLimit-Remaining'] = max(0, available)
-        request.META['X-RateLimit-Reset'] = reset_time
+        request.META["X-RateLimit-Remaining"] = max(0, available)
+        request.META["X-RateLimit-Reset"] = reset_time

         return allowed
```
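For context, a throttle like this only takes effect once it is wired into a view or the global DRF settings. A minimal sketch, assuming standard DRF configuration; the settings-module location and import path are assumptions, not confirmed by this diff:

```python
# settings.py (hypothetical location) -- register the throttle globally.
REST_FRAMEWORK = {
    "DEFAULT_THROTTLE_CLASSES": [
        "plane.api.rate_limit.ApiKeyRateThrottle",  # assumed import path
    ],
    # This class hard-codes rate = "60/minute" on the class itself; throttles
    # without an explicit rate would instead read DEFAULT_THROTTLE_RATES by scope.
    "DEFAULT_THROTTLE_RATES": {"api_key": "60/minute"},
}
```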
```diff
@@ -13,5 +13,9 @@ from .issue import (
 )
 from .state import StateLiteSerializer, StateSerializer
 from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
-from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
+from .module import (
+    ModuleSerializer,
+    ModuleIssueSerializer,
+    ModuleLiteSerializer,
+)
 from .inbox import InboxIssueSerializer
```
@ -66,11 +66,11 @@ class BaseSerializer(serializers.ModelSerializer):
|
|||||||
if expand in self.fields:
|
if expand in self.fields:
|
||||||
# Import all the expandable serializers
|
# Import all the expandable serializers
|
||||||
from . import (
|
from . import (
|
||||||
WorkspaceLiteSerializer,
|
|
||||||
ProjectLiteSerializer,
|
|
||||||
UserLiteSerializer,
|
|
||||||
StateLiteSerializer,
|
|
||||||
IssueSerializer,
|
IssueSerializer,
|
||||||
|
ProjectLiteSerializer,
|
||||||
|
StateLiteSerializer,
|
||||||
|
UserLiteSerializer,
|
||||||
|
WorkspaceLiteSerializer,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Expansion mapper
|
# Expansion mapper
|
||||||
@ -97,9 +97,11 @@ class BaseSerializer(serializers.ModelSerializer):
|
|||||||
exp_serializer = expansion[expand](
|
exp_serializer = expansion[expand](
|
||||||
getattr(instance, expand)
|
getattr(instance, expand)
|
||||||
)
|
)
|
||||||
response[expand] = exp_serializer.data
|
response[expand] = exp_serializer.data
|
||||||
else:
|
else:
|
||||||
# You might need to handle this case differently
|
# You might need to handle this case differently
|
||||||
response[expand] = getattr(instance, f"{expand}_id", None)
|
response[expand] = getattr(
|
||||||
|
instance, f"{expand}_id", None
|
||||||
|
)
|
||||||
|
|
||||||
return response
|
return response
|
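The expansion mapper lets API clients inflate related objects on demand instead of receiving bare ids. A brief usage sketch, assuming an existing `Issue` instance in scope (the query-string plumbing that produces the `expand` list is reworked later in this diff):

```python
from plane.api.serializers import IssueSerializer

# Hypothetical call, for illustration: a request with ?expand=state
# eventually reaches the serializer like this.
serializer = IssueSerializer(issue, expand=["state"])
# serializer.data["state"] is now the serialized State object rather
# than the "<expand>_id" fallback shown in to_representation above.
```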
@@ -23,7 +23,9 @@ class CycleSerializer(BaseSerializer):
             and data.get("end_date", None) is not None
             and data.get("start_date", None) > data.get("end_date", None)
         ):
-            raise serializers.ValidationError("Start date cannot exceed end date")
+            raise serializers.ValidationError(
+                "Start date cannot exceed end date"
+            )
         return data

     class Meta:

@@ -55,7 +57,6 @@ class CycleIssueSerializer(BaseSerializer):


 class CycleLiteSerializer(BaseSerializer):
-
     class Meta:
         model = Cycle
         fields = "__all__"

@@ -2,8 +2,8 @@
 from .base import BaseSerializer
 from plane.db.models import InboxIssue

-class InboxIssueSerializer(BaseSerializer):

+class InboxIssueSerializer(BaseSerializer):
     class Meta:
         model = InboxIssue
         fields = "__all__"

@@ -1,31 +1,34 @@
-from lxml import html
+from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator

 # Django imports
 from django.utils import timezone
+from lxml import html

 # Third party imports
 from rest_framework import serializers

 # Module imports
 from plane.db.models import (
-    User,
     Issue,
-    State,
+    IssueActivity,
     IssueAssignee,
-    Label,
+    IssueAttachment,
+    IssueComment,
     IssueLabel,
     IssueLink,
-    IssueComment,
-    IssueAttachment,
-    IssueActivity,
+    Label,
     ProjectMember,
+    State,
+    User,
 )

 from .base import BaseSerializer
-from .cycle import CycleSerializer, CycleLiteSerializer
-from .module import ModuleSerializer, ModuleLiteSerializer
-from .user import UserLiteSerializer
+from .cycle import CycleLiteSerializer, CycleSerializer
+from .module import ModuleLiteSerializer, ModuleSerializer
 from .state import StateLiteSerializer
+from .user import UserLiteSerializer


 class IssueSerializer(BaseSerializer):
     assignees = serializers.ListField(

@@ -66,16 +69,18 @@ class IssueSerializer(BaseSerializer):
             and data.get("target_date", None) is not None
             and data.get("start_date", None) > data.get("target_date", None)
         ):
-            raise serializers.ValidationError("Start date cannot exceed target date")
+            raise serializers.ValidationError(
+                "Start date cannot exceed target date"
+            )

         try:
-            if(data.get("description_html", None) is not None):
+            if data.get("description_html", None) is not None:
                 parsed = html.fromstring(data["description_html"])
-                parsed_str = html.tostring(parsed, encoding='unicode')
+                parsed_str = html.tostring(parsed, encoding="unicode")
                 data["description_html"] = parsed_str

-        except Exception as e:
-            raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+        except Exception:
+            raise serializers.ValidationError("Invalid HTML passed")

         # Validate assignees are from project
         if data.get("assignees", []):

@@ -96,7 +101,8 @@ class IssueSerializer(BaseSerializer):
         if (
             data.get("state")
             and not State.objects.filter(
-                project_id=self.context.get("project_id"), pk=data.get("state")
+                project_id=self.context.get("project_id"),
+                pk=data.get("state").id,
             ).exists()
         ):
             raise serializers.ValidationError(

@@ -107,7 +113,8 @@ class IssueSerializer(BaseSerializer):
         if (
             data.get("parent")
             and not Issue.objects.filter(
-                workspace_id=self.context.get("workspace_id"), pk=data.get("parent")
+                workspace_id=self.context.get("workspace_id"),
+                pk=data.get("parent").id,
             ).exists()
         ):
             raise serializers.ValidationError(

@@ -238,9 +245,13 @@ class IssueSerializer(BaseSerializer):
             ]
         if "labels" in self.fields:
             if "labels" in self.expand:
-                data["labels"] = LabelSerializer(instance.labels.all(), many=True).data
+                data["labels"] = LabelSerializer(
+                    instance.labels.all(), many=True
+                ).data
             else:
-                data["labels"] = [str(label.id) for label in instance.labels.all()]
+                data["labels"] = [
+                    str(label.id) for label in instance.labels.all()
+                ]

         return data

@@ -275,16 +286,42 @@ class IssueLinkSerializer(BaseSerializer):
             "updated_at",
         ]

+    def validate_url(self, value):
+        # Check URL format
+        validate_url = URLValidator()
+        try:
+            validate_url(value)
+        except ValidationError:
+            raise serializers.ValidationError("Invalid URL format.")
+
+        # Check URL scheme
+        if not value.startswith(("http://", "https://")):
+            raise serializers.ValidationError("Invalid URL scheme.")
+
+        return value
+
     # Validation if url already exists
     def create(self, validated_data):
         if IssueLink.objects.filter(
-            url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
+            url=validated_data.get("url"),
+            issue_id=validated_data.get("issue_id"),
         ).exists():
             raise serializers.ValidationError(
                 {"error": "URL already exists for this Issue"}
             )
         return IssueLink.objects.create(**validated_data)

+    def update(self, instance, validated_data):
+        if IssueLink.objects.filter(
+            url=validated_data.get("url"),
+            issue_id=instance.issue_id,
+        ).exists():
+            raise serializers.ValidationError(
+                {"error": "URL already exists for this Issue"}
+            )
+
+        return super().update(instance, validated_data)
+
+
 class IssueAttachmentSerializer(BaseSerializer):
     class Meta:
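The new `validate_url` hook combines Django's stock `URLValidator` with an explicit scheme allow-list. A self-contained sketch of the same two checks, with a helper name of our own choosing (the serializer machinery is omitted):

```python
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator


def check_link(value: str) -> str:  # hypothetical helper, for illustration
    # URLValidator raises ValidationError on malformed URLs...
    URLValidator()(value)
    # ...but by default it also accepts ftp:// and ftps://, hence the
    # explicit scheme check mirrored from the serializer above.
    if not value.startswith(("http://", "https://")):
        raise ValidationError("Invalid URL scheme.")
    return value
```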
@@ -324,13 +361,13 @@ class IssueCommentSerializer(BaseSerializer):

     def validate(self, data):
         try:
-            if(data.get("comment_html", None) is not None):
+            if data.get("comment_html", None) is not None:
                 parsed = html.fromstring(data["comment_html"])
-                parsed_str = html.tostring(parsed, encoding='unicode')
+                parsed_str = html.tostring(parsed, encoding="unicode")
                 data["comment_html"] = parsed_str

-        except Exception as e:
-            raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+        except Exception:
+            raise serializers.ValidationError("Invalid HTML passed")
         return data


@@ -362,7 +399,6 @@ class ModuleIssueSerializer(BaseSerializer):


 class LabelLiteSerializer(BaseSerializer):
-
     class Meta:
         model = Label
         fields = [

@@ -52,7 +52,9 @@ class ModuleSerializer(BaseSerializer):
             and data.get("target_date", None) is not None
             and data.get("start_date", None) > data.get("target_date", None)
         ):
-            raise serializers.ValidationError("Start date cannot exceed target date")
+            raise serializers.ValidationError(
+                "Start date cannot exceed target date"
+            )

         if data.get("members", []):
             data["members"] = ProjectMember.objects.filter(

@@ -65,18 +67,18 @@ class ModuleSerializer(BaseSerializer):
     def create(self, validated_data):
         members = validated_data.pop("members", None)

-        project = self.context["project"]
-        module = Module.objects.create(**validated_data, project=project)
+        project_id = self.context["project_id"]
+        workspace_id = self.context["workspace_id"]

+        module = Module.objects.create(**validated_data, project_id=project_id)
         if members is not None:
             ModuleMember.objects.bulk_create(
                 [
                     ModuleMember(
                         module=module,
-                        member=member,
-                        project=project,
-                        workspace=project.workspace,
+                        member_id=str(member),
+                        project_id=project_id,
+                        workspace_id=workspace_id,
                         created_by=module.created_by,
                         updated_by=module.updated_by,
                     )

@@ -97,7 +99,7 @@ class ModuleSerializer(BaseSerializer):
                 [
                     ModuleMember(
                         module=instance,
-                        member=member,
+                        member_id=str(member),
                         project=instance.project,
                         workspace=instance.project.workspace,
                         created_by=instance.created_by,

@@ -146,7 +148,8 @@ class ModuleLinkSerializer(BaseSerializer):
     # Validation if url already exists
     def create(self, validated_data):
         if ModuleLink.objects.filter(
-            url=validated_data.get("url"), module_id=validated_data.get("module_id")
+            url=validated_data.get("url"),
+            module_id=validated_data.get("module_id"),
         ).exists():
             raise serializers.ValidationError(
                 {"error": "URL already exists for this Issue"}

@@ -155,7 +158,6 @@ class ModuleLinkSerializer(BaseSerializer):


 class ModuleLiteSerializer(BaseSerializer):
-
     class Meta:
         model = Module
         fields = "__all__"

@@ -2,12 +2,16 @@
 from rest_framework import serializers

 # Module imports
-from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate
+from plane.db.models import (
+    Project,
+    ProjectIdentifier,
+    WorkspaceMember,
+)

 from .base import BaseSerializer


 class ProjectSerializer(BaseSerializer):

     total_members = serializers.IntegerField(read_only=True)
     total_cycles = serializers.IntegerField(read_only=True)
     total_modules = serializers.IntegerField(read_only=True)

@@ -21,7 +25,7 @@ class ProjectSerializer(BaseSerializer):
         fields = "__all__"
         read_only_fields = [
             "id",
-            'emoji',
+            "emoji",
             "workspace",
             "created_at",
             "updated_at",

@@ -59,12 +63,16 @@ class ProjectSerializer(BaseSerializer):
     def create(self, validated_data):
         identifier = validated_data.get("identifier", "").strip().upper()
         if identifier == "":
-            raise serializers.ValidationError(detail="Project Identifier is required")
+            raise serializers.ValidationError(
+                detail="Project Identifier is required"
+            )

         if ProjectIdentifier.objects.filter(
             name=identifier, workspace_id=self.context["workspace_id"]
         ).exists():
-            raise serializers.ValidationError(detail="Project Identifier is taken")
+            raise serializers.ValidationError(
+                detail="Project Identifier is taken"
+            )

         project = Project.objects.create(
             **validated_data, workspace_id=self.context["workspace_id"]

@@ -7,9 +7,9 @@ class StateSerializer(BaseSerializer):
     def validate(self, data):
         # If the default is being provided then make all other states default False
         if data.get("default", False):
-            State.objects.filter(project_id=self.context.get("project_id")).update(
-                default=False
-            )
+            State.objects.filter(
+                project_id=self.context.get("project_id")
+            ).update(default=False)
         return data

     class Meta:
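The `validate` hook above preserves the invariant that a project has at most one default state: the flag is bulk-cleared before the incoming default is saved. A compressed sketch, with `State` from `plane.db.models` and `project_id` assumed in scope:

```python
from plane.db.models import State

# .update() issues a single SQL UPDATE, so every other state in the
# project drops the flag before the serializer saves the new default.
State.objects.filter(project_id=project_id).update(default=False)
```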
@@ -1,5 +1,6 @@
 # Module imports
 from plane.db.models import User
+
 from .base import BaseSerializer


@@ -10,7 +11,9 @@ class UserLiteSerializer(BaseSerializer):
             "id",
             "first_name",
             "last_name",
+            "email",
             "avatar",
             "display_name",
+            "email",
         ]
         read_only_fields = fields

@@ -5,6 +5,7 @@ from .base import BaseSerializer

 class WorkspaceLiteSerializer(BaseSerializer):
     """Lite serializer with only required fields"""
+
     class Meta:
         model = Workspace
         fields = [

@@ -4,6 +4,7 @@ from plane.api.views.cycle import (
     CycleAPIEndpoint,
     CycleIssueAPIEndpoint,
     TransferCycleIssueAPIEndpoint,
+    CycleArchiveUnarchiveAPIEndpoint,
 )

 urlpatterns = [

@@ -32,4 +33,14 @@ urlpatterns = [
         TransferCycleIssueAPIEndpoint.as_view(),
         name="transfer-issues",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/archive/",
+        CycleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
+        CycleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
 ]

@@ -1,6 +1,10 @@
 from django.urls import path

-from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from plane.api.views import (
+    ModuleAPIEndpoint,
+    ModuleIssueAPIEndpoint,
+    ModuleArchiveUnarchiveAPIEndpoint,
+)

 urlpatterns = [
     path(

@@ -23,4 +27,14 @@ urlpatterns = [
         ModuleIssueAPIEndpoint.as_view(),
         name="module-issues",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/archive/",
+        ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
+        ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
 ]

@@ -1,16 +1,24 @@
 from django.urls import path

-from plane.api.views import ProjectAPIEndpoint
+from plane.api.views import (
+    ProjectAPIEndpoint,
+    ProjectArchiveUnarchiveAPIEndpoint,
+)

 urlpatterns = [
     path(
         "workspaces/<str:slug>/projects/",
         ProjectAPIEndpoint.as_view(),
         name="project",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/",
+        "workspaces/<str:slug>/projects/<uuid:pk>/",
         ProjectAPIEndpoint.as_view(),
         name="project",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
+        ProjectArchiveUnarchiveAPIEndpoint.as_view(),
+        name="project-archive-unarchive",
+    ),
 ]
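Taken together, the new routes expose archiving as POST and unarchiving as DELETE on the same `.../archive/` URL, with a companion listing route. A hedged sketch of exercising the cycle variant with the `requests` library; the host, `/api/v1` prefix, and credential are placeholders, not confirmed by this diff:

```python
import requests  # illustration only

BASE = "https://plane.example.com/api/v1"  # placeholder host and prefix
HEADERS = {"X-Api-Key": "<api-key>"}       # placeholder credential

# POST archives a cycle (handled by CycleArchiveUnarchiveAPIEndpoint,
# defined later in this diff); DELETE on the same URL unarchives it.
requests.post(
    f"{BASE}/workspaces/acme/projects/<project_id>/cycles/<pk>/archive/",
    headers=HEADERS,
)

# The sibling route lists a project's archived cycles.
requests.get(
    f"{BASE}/workspaces/acme/projects/<project_id>/archived-cycles/",
    headers=HEADERS,
)
```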
@@ -1,4 +1,4 @@
-from .project import ProjectAPIEndpoint
+from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint

 from .state import StateAPIEndpoint

@@ -14,8 +14,13 @@ from .cycle import (
     CycleAPIEndpoint,
     CycleIssueAPIEndpoint,
     TransferCycleIssueAPIEndpoint,
+    CycleArchiveUnarchiveAPIEndpoint,
 )

-from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from .module import (
+    ModuleAPIEndpoint,
+    ModuleIssueAPIEndpoint,
+    ModuleArchiveUnarchiveAPIEndpoint,
+)

 from .inbox import InboxIssueAPIEndpoint

@@ -1,25 +1,27 @@
 # Python imports
+from urllib.parse import urlparse
+
 import zoneinfo
-import json

 # Django imports
 from django.conf import settings
-from django.db import IntegrityError
 from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError
+from django.urls import resolve
 from django.utils import timezone
+from rest_framework import status
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
+
 # Third party imports
 from rest_framework.views import APIView
-from rest_framework.response import Response
-from rest_framework.permissions import IsAuthenticated
-from rest_framework import status
-from sentry_sdk import capture_exception

 # Module imports
 from plane.api.middleware.api_authentication import APIKeyAuthentication
 from plane.api.rate_limit import ApiKeyRateThrottle
-from plane.utils.paginator import BasePaginator
 from plane.bgtasks.webhook_task import send_webhook
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator


 class TimezoneMixin:

@@ -41,7 +43,9 @@ class WebhookMixin:
     bulk = False

     def finalize_response(self, request, response, *args, **kwargs):
-        response = super().finalize_response(request, response, *args, **kwargs)
+        response = super().finalize_response(
+            request, response, *args, **kwargs
+        )

         # Check for the case should webhook be sent
         if (

@@ -49,6 +53,11 @@ class WebhookMixin:
             and self.request.method in ["POST", "PATCH", "DELETE"]
             and response.status_code in [200, 201, 204]
         ):
+            url = request.build_absolute_uri()
+            parsed_url = urlparse(url)
+            # Extract the scheme and netloc
+            scheme = parsed_url.scheme
+            netloc = parsed_url.netloc
             # Push the object to delay
             send_webhook.delay(
                 event=self.webhook_event,

@@ -57,6 +66,7 @@ class WebhookMixin:
                 action=self.request.method,
                 slug=self.workspace_slug,
                 bulk=self.bulk,
+                current_site=f"{scheme}://{netloc}",
             )

         return response
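The webhook payload now carries the origin that served the request. For reference, `urlparse` splits an absolute URL into components, and `scheme` plus `netloc` reassemble exactly that origin:

```python
from urllib.parse import urlparse

# The URL is a placeholder, for illustration.
parsed = urlparse("https://app.example.com/api/workspaces/acme/cycles/")
origin = f"{parsed.scheme}://{parsed.netloc}"
assert origin == "https://app.example.com"
```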
@@ -97,28 +107,23 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):

             if isinstance(e, ValidationError):
                 return Response(
-                    {
-                        "error": "The provided payload is not valid please try with a valid payload"
-                    },
+                    {"error": "Please provide valid detail"},
                     status=status.HTTP_400_BAD_REQUEST,
                 )

             if isinstance(e, ObjectDoesNotExist):
-                model_name = str(exc).split(" matching query does not exist.")[0]
                 return Response(
-                    {"error": f"{model_name} does not exist."},
+                    {"error": "The requested resource does not exist."},
                     status=status.HTTP_404_NOT_FOUND,
                 )

             if isinstance(e, KeyError):
                 return Response(
-                    {"error": f"key {e} does not exist"},
+                    {"error": "The required key does not exist."},
                     status=status.HTTP_400_BAD_REQUEST,
                 )

-            if settings.DEBUG:
-                print(e)
-            capture_exception(e)
+            log_exception(e)
             return Response(
                 {"error": "Something went wrong please try again later"},
                 status=status.HTTP_500_INTERNAL_SERVER_ERROR,

@@ -140,7 +145,9 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):

     def finalize_response(self, request, response, *args, **kwargs):
         # Call super to get the default response
-        response = super().finalize_response(request, response, *args, **kwargs)
+        response = super().finalize_response(
+            request, response, *args, **kwargs
+        )

         # Add custom headers if they exist in the request META
         ratelimit_remaining = request.META.get("X-RateLimit-Remaining")

@@ -159,18 +166,27 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):

     @property
     def project_id(self):
-        return self.kwargs.get("project_id", None)
+        project_id = self.kwargs.get("project_id", None)
+        if project_id:
+            return project_id
+
+        if resolve(self.request.path_info).url_name == "project":
+            return self.kwargs.get("pk", None)

     @property
     def fields(self):
         fields = [
-            field for field in self.request.GET.get("fields", "").split(",") if field
+            field
+            for field in self.request.GET.get("fields", "").split(",")
+            if field
         ]
         return fields if fields else None

     @property
     def expand(self):
         expand = [
-            expand for expand in self.request.GET.get("expand", "").split(",") if expand
+            expand
+            for expand in self.request.GET.get("expand", "").split(",")
+            if expand
         ]
         return expand if expand else None
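The reworked `project_id` property falls back to the `pk` kwarg, but only when the matched route carries the URL name "project" (the project detail pattern was renamed to `<uuid:pk>` earlier in this diff). `resolve()` is how it inspects the matched route; a sketch, runnable only inside a configured Django project and with a placeholder path:

```python
from django.urls import resolve

# Hypothetical path, for illustration; resolve() maps a request path back
# to the urlconf entry that would serve it.
match = resolve("/workspaces/acme/projects/2b6e7a1c-0000-0000-0000-000000000000/")
print(match.url_name)  # "project" for the renamed detail route
print(match.kwargs)    # e.g. {"slug": "acme", "pk": UUID(...)}
```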
@@ -2,23 +2,31 @@
 import json

 # Django imports
-from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func
-from django.utils import timezone
 from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Q, Sum
+from django.utils import timezone

 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response

 # Module imports
-from .base import BaseAPIView, WebhookMixin
-from plane.db.models import Cycle, Issue, CycleIssue, IssueLink, IssueAttachment
-from plane.app.permissions import ProjectEntityPermission
 from plane.api.serializers import (
-    CycleSerializer,
     CycleIssueSerializer,
+    CycleSerializer,
 )
+from plane.app.permissions import ProjectEntityPermission
 from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+    Cycle,
+    CycleIssue,
+    Issue,
+    IssueAttachment,
+    IssueLink,
+)
+from plane.utils.analytics_plot import burndown_plot
+
+from .base import BaseAPIView, WebhookMixin


 class CycleAPIEndpoint(WebhookMixin, BaseAPIView):

@@ -39,7 +47,10 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
         return (
             Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
             .filter(project_id=self.kwargs.get("project_id"))
-            .filter(project__project_projectmember__member=self.request.user)
+            .filter(
+                project__project_projectmember__member=self.request.user,
+                project__project_projectmember__is_active=True,
+            )
             .select_related("project")
             .select_related("workspace")
             .select_related("owned_by")

@@ -102,7 +113,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
                     ),
                 )
             )
-            .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+            .annotate(
+                total_estimates=Sum("issue_cycle__issue__estimate_point")
+            )
             .annotate(
                 completed_estimates=Sum(
                     "issue_cycle__issue__estimate_point",

@@ -129,7 +142,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):

     def get(self, request, slug, project_id, pk=None):
         if pk:
-            queryset = self.get_queryset().get(pk=pk)
+            queryset = (
+                self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+            )
             data = CycleSerializer(
                 queryset,
                 fields=self.fields,

@@ -139,7 +154,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
                 data,
                 status=status.HTTP_200_OK,
             )
-        queryset = self.get_queryset()
+        queryset = (
+            self.get_queryset().filter(archived_at__isnull=True)
+        )
         cycle_view = request.GET.get("cycle_view", "all")

         # Current Cycle

@@ -201,7 +218,8 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
         # Incomplete Cycles
         if cycle_view == "incomplete":
             queryset = queryset.filter(
-                Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
+                Q(end_date__gte=timezone.now().date())
+                | Q(end_date__isnull=True),
             )
             return self.paginate(
                 request=request,

@@ -234,12 +252,39 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
         ):
             serializer = CycleSerializer(data=request.data)
             if serializer.is_valid():
+                if (
+                    request.data.get("external_id")
+                    and request.data.get("external_source")
+                    and Cycle.objects.filter(
+                        project_id=project_id,
+                        workspace__slug=slug,
+                        external_source=request.data.get("external_source"),
+                        external_id=request.data.get("external_id"),
+                    ).exists()
+                ):
+                    cycle = Cycle.objects.filter(
+                        workspace__slug=slug,
+                        project_id=project_id,
+                        external_source=request.data.get("external_source"),
+                        external_id=request.data.get("external_id"),
+                    ).first()
+                    return Response(
+                        {
+                            "error": "Cycle with the same external id and external source already exists",
+                            "id": str(cycle.id),
+                        },
+                        status=status.HTTP_409_CONFLICT,
+                    )
                 serializer.save(
                     project_id=project_id,
                     owned_by=request.user,
                 )
-                return Response(serializer.data, status=status.HTTP_201_CREATED)
-            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+                return Response(
+                    serializer.data, status=status.HTTP_201_CREATED
+                )
+            return Response(
+                serializer.errors, status=status.HTTP_400_BAD_REQUEST
+            )
         else:
             return Response(
                 {
|
|||||||
)
|
)
|
||||||
|
|
||||||
def patch(self, request, slug, project_id, pk):
|
def patch(self, request, slug, project_id, pk):
|
||||||
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
cycle = Cycle.objects.get(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk=pk
|
||||||
|
)
|
||||||
|
if cycle.archived_at:
|
||||||
|
return Response(
|
||||||
|
{"error": "Archived cycle cannot be edited"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
request_data = request.data
|
request_data = request.data
|
||||||
|
|
||||||
if cycle.end_date is not None and cycle.end_date < timezone.now().date():
|
if (
|
||||||
|
cycle.end_date is not None
|
||||||
|
and cycle.end_date < timezone.now().date()
|
||||||
|
):
|
||||||
if "sort_order" in request_data:
|
if "sort_order" in request_data:
|
||||||
# Can only change sort order
|
# Can only change sort order
|
||||||
request_data = {
|
request_data = {
|
||||||
"sort_order": request_data.get("sort_order", cycle.sort_order)
|
"sort_order": request_data.get(
|
||||||
|
"sort_order", cycle.sort_order
|
||||||
|
)
|
||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
return Response(
|
return Response(
|
||||||
@ -269,17 +326,38 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
|
|
||||||
serializer = CycleSerializer(cycle, data=request.data, partial=True)
|
serializer = CycleSerializer(cycle, data=request.data, partial=True)
|
||||||
if serializer.is_valid():
|
if serializer.is_valid():
|
||||||
|
if (
|
||||||
|
request.data.get("external_id")
|
||||||
|
and (cycle.external_id != request.data.get("external_id"))
|
||||||
|
and Cycle.objects.filter(
|
||||||
|
project_id=project_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
external_source=request.data.get(
|
||||||
|
"external_source", cycle.external_source
|
||||||
|
),
|
||||||
|
external_id=request.data.get("external_id"),
|
||||||
|
).exists()
|
||||||
|
):
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Cycle with the same external id and external source already exists",
|
||||||
|
"id": str(cycle.id),
|
||||||
|
},
|
||||||
|
status=status.HTTP_409_CONFLICT,
|
||||||
|
)
|
||||||
serializer.save()
|
serializer.save()
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
def delete(self, request, slug, project_id, pk):
|
def delete(self, request, slug, project_id, pk):
|
||||||
cycle_issues = list(
|
cycle_issues = list(
|
||||||
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
|
CycleIssue.objects.filter(
|
||||||
"issue", flat=True
|
cycle_id=self.kwargs.get("pk")
|
||||||
)
|
).values_list("issue", flat=True)
|
||||||
|
)
|
||||||
|
cycle = Cycle.objects.get(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk=pk
|
||||||
)
|
)
|
||||||
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
|
||||||
|
|
||||||
issue_activity.delay(
|
issue_activity.delay(
|
||||||
type="cycle.activity.deleted",
|
type="cycle.activity.deleted",
|
||||||
@ -301,6 +379,139 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||||
|
|
||||||
|
permission_classes = [
|
||||||
|
ProjectEntityPermission,
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
return (
|
||||||
|
Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
|
.filter(project_id=self.kwargs.get("project_id"))
|
||||||
|
.filter(
|
||||||
|
project__project_projectmember__member=self.request.user,
|
||||||
|
project__project_projectmember__is_active=True,
|
||||||
|
)
|
||||||
|
.filter(archived_at__isnull=False)
|
||||||
|
.select_related("project")
|
||||||
|
.select_related("workspace")
|
||||||
|
.select_related("owned_by")
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"issue_cycle",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="completed",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
cancelled_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="cancelled",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="started",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
unstarted_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="unstarted",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
backlog_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="backlog",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
total_estimates=Sum("issue_cycle__issue__estimate_point")
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_estimates=Sum(
|
||||||
|
"issue_cycle__issue__estimate_point",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="completed",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_estimates=Sum(
|
||||||
|
"issue_cycle__issue__estimate_point",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="started",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
|
||||||
|
def get(self, request, slug, project_id):
|
||||||
|
return self.paginate(
|
||||||
|
request=request,
|
||||||
|
queryset=(self.get_queryset()),
|
||||||
|
on_results=lambda cycles: CycleSerializer(
|
||||||
|
cycles,
|
||||||
|
many=True,
|
||||||
|
fields=self.fields,
|
||||||
|
expand=self.expand,
|
||||||
|
).data,
|
||||||
|
)
|
||||||
|
|
||||||
|
def post(self, request, slug, project_id, pk):
|
||||||
|
cycle = Cycle.objects.get(
|
||||||
|
pk=pk, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
cycle.archived_at = timezone.now()
|
||||||
|
cycle.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
def delete(self, request, slug, project_id, pk):
|
||||||
|
cycle = Cycle.objects.get(
|
||||||
|
pk=pk, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
cycle.archived_at = None
|
||||||
|
cycle.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
"""
|
"""
|
||||||
This viewset automatically provides `list`, `create`,
|
This viewset automatically provides `list`, `create`,
|
||||||
@ -319,14 +530,19 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return (
|
return (
|
||||||
CycleIssue.objects.annotate(
|
CycleIssue.objects.annotate(
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id"))
|
sub_issues_count=Issue.issue_objects.filter(
|
||||||
|
parent=OuterRef("issue_id")
|
||||||
|
)
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
.values("count")
|
.values("count")
|
||||||
)
|
)
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.filter(project_id=self.kwargs.get("project_id"))
|
.filter(project_id=self.kwargs.get("project_id"))
|
||||||
.filter(project__project_projectmember__member=self.request.user)
|
.filter(
|
||||||
|
project__project_projectmember__member=self.request.user,
|
||||||
|
project__project_projectmember__is_active=True,
|
||||||
|
)
|
||||||
.filter(cycle_id=self.kwargs.get("cycle_id"))
|
.filter(cycle_id=self.kwargs.get("cycle_id"))
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
@ -337,12 +553,28 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
.distinct()
|
.distinct()
|
||||||
)
|
)
|
||||||
|
|
||||||
def get(self, request, slug, project_id, cycle_id):
|
def get(self, request, slug, project_id, cycle_id, issue_id=None):
|
||||||
|
# Get
|
||||||
|
if issue_id:
|
||||||
|
cycle_issue = CycleIssue.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
cycle_id=cycle_id,
|
||||||
|
issue_id=issue_id,
|
||||||
|
)
|
||||||
|
serializer = CycleIssueSerializer(
|
||||||
|
cycle_issue, fields=self.fields, expand=self.expand
|
||||||
|
)
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
# List
|
||||||
order_by = request.GET.get("order_by", "created_at")
|
order_by = request.GET.get("order_by", "created_at")
|
||||||
issues = (
|
issues = (
|
||||||
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
|
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
|
||||||
.annotate(
|
.annotate(
|
||||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
sub_issues_count=Issue.issue_objects.filter(
|
||||||
|
parent=OuterRef("id")
|
||||||
|
)
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
.values("count")
|
.values("count")
|
||||||
@ -364,7 +596,9 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
.values("count")
|
.values("count")
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
|
attachment_count=IssueAttachment.objects.filter(
|
||||||
|
issue=OuterRef("id")
|
||||||
|
)
|
||||||
.order_by()
|
.order_by()
|
||||||
.annotate(count=Func(F("id"), function="Count"))
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
.values("count")
|
.values("count")
|
||||||
@ -387,14 +621,18 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
|
|
||||||
if not issues:
|
if not issues:
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
|
{"error": "Issues are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
cycle = Cycle.objects.get(
|
cycle = Cycle.objects.get(
|
||||||
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||||
)
|
)
|
||||||
|
|
||||||
if cycle.end_date is not None and cycle.end_date < timezone.now().date():
|
if (
|
||||||
|
cycle.end_date is not None
|
||||||
|
and cycle.end_date < timezone.now().date()
|
||||||
|
):
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"error": "The Cycle has already been completed so no new issues can be added"
|
"error": "The Cycle has already been completed so no new issues can be added"
|
||||||
@ -479,7 +717,10 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
|
|
||||||
def delete(self, request, slug, project_id, cycle_id, issue_id):
|
def delete(self, request, slug, project_id, cycle_id, issue_id):
|
||||||
cycle_issue = CycleIssue.objects.get(
|
cycle_issue = CycleIssue.objects.get(
|
||||||
issue_id=issue_id, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id
|
issue_id=issue_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
cycle_id=cycle_id,
|
||||||
)
|
)
|
||||||
issue_id = cycle_issue.issue_id
|
issue_id = cycle_issue.issue_id
|
||||||
cycle_issue.delete()
|
cycle_issue.delete()
|
||||||
@ -523,6 +764,209 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
|||||||
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
|
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
old_cycle = (
|
||||||
|
Cycle.objects.filter(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"issue_cycle",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="completed",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
cancelled_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="cancelled",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="started",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
unstarted_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="unstarted",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
backlog_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="backlog",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Pass the new_cycle queryset to burndown_plot
|
||||||
|
completion_chart = burndown_plot(
|
||||||
|
queryset=old_cycle.first(),
|
||||||
|
slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
cycle_id=cycle_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get the assignee distribution
|
||||||
|
assignee_distribution = (
|
||||||
|
Issue.objects.filter(
|
||||||
|
issue_cycle__cycle_id=cycle_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
)
|
||||||
|
.annotate(display_name=F("assignees__display_name"))
|
||||||
|
.annotate(assignee_id=F("assignees__id"))
|
||||||
|
.annotate(avatar=F("assignees__avatar"))
|
||||||
|
.values("display_name", "assignee_id", "avatar")
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(archived_at__isnull=True, is_draft=False),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=False,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
pending_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=True,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by("display_name")
|
||||||
|
)
|
||||||
|
# assignee distribution serialized
|
||||||
|
assignee_distribution_data = [
|
||||||
|
{
|
||||||
|
"display_name": item["display_name"],
|
||||||
|
"assignee_id": (
|
||||||
|
str(item["assignee_id"]) if item["assignee_id"] else None
|
||||||
|
),
|
||||||
|
"avatar": item["avatar"],
|
||||||
|
"total_issues": item["total_issues"],
|
||||||
|
"completed_issues": item["completed_issues"],
|
||||||
|
"pending_issues": item["pending_issues"],
|
||||||
|
}
|
||||||
|
for item in assignee_distribution
|
||||||
|
]
|
||||||
|
|
||||||
|
# Get the label distribution
|
||||||
|
label_distribution = (
|
||||||
|
Issue.objects.filter(
|
||||||
|
issue_cycle__cycle_id=cycle_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
)
|
||||||
|
.annotate(label_name=F("labels__name"))
|
||||||
|
.annotate(color=F("labels__color"))
|
||||||
|
.annotate(label_id=F("labels__id"))
|
||||||
|
.values("label_name", "color", "label_id")
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(archived_at__isnull=True, is_draft=False),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=False,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
pending_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=True,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by("label_name")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Label distribution serilization
|
||||||
|
label_distribution_data = [
|
||||||
|
{
|
||||||
|
"label_name": item["label_name"],
|
||||||
|
"color": item["color"],
|
||||||
|
"label_id": (
|
||||||
|
str(item["label_id"]) if item["label_id"] else None
|
||||||
|
),
|
||||||
|
"total_issues": item["total_issues"],
|
||||||
|
"completed_issues": item["completed_issues"],
|
||||||
|
"pending_issues": item["pending_issues"],
|
||||||
|
}
|
||||||
|
for item in label_distribution
|
||||||
|
]
|
||||||
|
|
||||||
|
current_cycle = Cycle.objects.filter(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if current_cycle:
|
||||||
|
current_cycle.progress_snapshot = {
|
||||||
|
"total_issues": old_cycle.first().total_issues,
|
||||||
|
"completed_issues": old_cycle.first().completed_issues,
|
||||||
|
"cancelled_issues": old_cycle.first().cancelled_issues,
|
||||||
|
"started_issues": old_cycle.first().started_issues,
|
||||||
|
"unstarted_issues": old_cycle.first().unstarted_issues,
|
||||||
|
"backlog_issues": old_cycle.first().backlog_issues,
|
||||||
|
"distribution": {
|
||||||
|
"labels": label_distribution_data,
|
||||||
|
"assignees": assignee_distribution_data,
|
||||||
|
"completion_chart": completion_chart,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
# Save the snapshot of the current cycle
|
||||||
|
current_cycle.save(update_fields=["progress_snapshot"])
|
||||||
|
|
||||||
if (
|
if (
|
||||||
new_cycle.end_date is not None
|
new_cycle.end_date is not None
|
||||||
and new_cycle.end_date < timezone.now().date()
|
and new_cycle.end_date < timezone.now().date()
|
||||||
|
@@ -2,20 +2,28 @@
 import json

 # Django imports
-from django.utils import timezone
-from django.db.models import Q
 from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import Q
+from django.utils import timezone

 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response

 # Module imports
-from .base import BaseAPIView
-from plane.app.permissions import ProjectLitePermission
 from plane.api.serializers import InboxIssueSerializer, IssueSerializer
-from plane.db.models import InboxIssue, Issue, State, ProjectMember, Project, Inbox
+from plane.app.permissions import ProjectLitePermission
 from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+    Inbox,
+    InboxIssue,
+    Issue,
+    Project,
+    ProjectMember,
+    State,
+)
+
+from .base import BaseAPIView


 class InboxIssueAPIEndpoint(BaseAPIView):
@@ -43,7 +51,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         ).first()

         project = Project.objects.get(
-            workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id")
+            workspace__slug=self.kwargs.get("slug"),
+            pk=self.kwargs.get("project_id"),
         )

         if inbox is None and not project.inbox_view:
@@ -51,7 +60,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):

         return (
             InboxIssue.objects.filter(
-                Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
+                Q(snoozed_till__gte=timezone.now())
+                | Q(snoozed_till__isnull=True),
                 workspace__slug=self.kwargs.get("slug"),
                 project_id=self.kwargs.get("project_id"),
                 inbox_id=inbox.id,
@@ -87,7 +97,8 @@ class InboxIssueAPIEndpoint(BaseAPIView):
     def post(self, request, slug, project_id):
         if not request.data.get("issue", {}).get("name", False):
             return Response(
-                {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
+                {"error": "Name is required"},
+                status=status.HTTP_400_BAD_REQUEST,
             )

         inbox = Inbox.objects.filter(
@@ -109,7 +120,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             )

         # Check for valid priority
-        if not request.data.get("issue", {}).get("priority", "none") in [
+        if request.data.get("issue", {}).get("priority", "none") not in [
             "low",
             "medium",
             "high",
@@ -117,16 +128,18 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             "none",
         ]:
             return Response(
-                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
+                {"error": "Invalid priority"},
+                status=status.HTTP_400_BAD_REQUEST,
             )

         # Create or get state
         state, _ = State.objects.get_or_create(
             name="Triage",
-            group="backlog",
+            group="triage",
             description="Default state for managing all Inbox Issues",
             project_id=project_id,
             color="#ff7700",
+            is_triage=True,
         )

         # create an issue
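
This hunk, together with the matching one in the project view further below, moves the Triage state out of the reserved "backlog" group into its own "triage" group and flags it with is_triage=True, so callers can stop matching on the state's display name. A sketch of resolving it by flag, assuming the State model has gained the is_triage boolean used above:

    triage_state = State.objects.filter(
        project_id=project_id, is_triage=True
    ).first()
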
@@ -222,10 +235,14 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                     "description_html": issue_data.get(
                         "description_html", issue.description_html
                     ),
-                    "description": issue_data.get("description", issue.description),
+                    "description": issue_data.get(
+                        "description", issue.description
+                    ),
                 }

-                issue_serializer = IssueSerializer(issue, data=issue_data, partial=True)
+                issue_serializer = IssueSerializer(
+                    issue, data=issue_data, partial=True
+                )

                 if issue_serializer.is_valid():
                     current_instance = issue
@@ -266,7 +283,9 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                         project_id=project_id,
                     )
                     state = State.objects.filter(
-                        group="cancelled", workspace__slug=slug, project_id=project_id
+                        group="cancelled",
+                        workspace__slug=slug,
+                        project_id=project_id,
                     ).first()
                     if state is not None:
                         issue.state = state
@@ -281,20 +300,25 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                     )

                     # Update the issue state only if it is in triage state
-                    if issue.state.name == "Triage":
+                    if issue.state.is_triage:
                         # Move to default state
                         state = State.objects.filter(
-                            workspace__slug=slug, project_id=project_id, default=True
+                            workspace__slug=slug,
+                            project_id=project_id,
+                            default=True,
                         ).first()
                         if state is not None:
                             issue.state = state
                             issue.save()

                 return Response(serializer.data, status=status.HTTP_200_OK)
-            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+            return Response(
+                serializer.errors, status=status.HTTP_400_BAD_REQUEST
+            )
         else:
             return Response(
-                InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK
+                InboxIssueSerializer(inbox_issue).data,
+                status=status.HTTP_200_OK,
             )

     def delete(self, request, slug, project_id, issue_id):
@@ -1,22 +1,22 @@
 # Python imports
 import json
-from itertools import chain
+
+from django.core.serializers.json import DjangoJSONEncoder

 # Django imports
 from django.db import IntegrityError
 from django.db.models import (
-    OuterRef,
-    Func,
-    Q,
-    F,
     Case,
-    When,
-    Value,
     CharField,
-    Max,
     Exists,
+    F,
+    Func,
+    Max,
+    OuterRef,
+    Q,
+    Value,
+    When,
 )
-from django.core.serializers.json import DjangoJSONEncoder
 from django.utils import timezone

 # Third party imports
@@ -24,30 +24,31 @@ from rest_framework import status
 from rest_framework.response import Response

 # Module imports
-from .base import BaseAPIView, WebhookMixin
-from plane.app.permissions import (
-    ProjectEntityPermission,
-    ProjectMemberPermission,
-    ProjectLitePermission,
-)
-from plane.db.models import (
-    Issue,
-    IssueAttachment,
-    IssueLink,
-    Project,
-    Label,
-    ProjectMember,
-    IssueComment,
-    IssueActivity,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
 from plane.api.serializers import (
+    IssueActivitySerializer,
+    IssueCommentSerializer,
+    IssueLinkSerializer,
     IssueSerializer,
     LabelSerializer,
-    IssueLinkSerializer,
-    IssueCommentSerializer,
-    IssueActivitySerializer,
 )
+from plane.app.permissions import (
+    ProjectEntityPermission,
+    ProjectLitePermission,
+    ProjectMemberPermission,
+)
+from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+    Issue,
+    IssueActivity,
+    IssueAttachment,
+    IssueComment,
+    IssueLink,
+    Label,
+    Project,
+    ProjectMember,
+)
+
+from .base import BaseAPIView, WebhookMixin


 class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -67,7 +68,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
     def get_queryset(self):
         return (
             Issue.issue_objects.annotate(
-                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+                sub_issues_count=Issue.issue_objects.filter(
+                    parent=OuterRef("id")
+                )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
@@ -86,7 +89,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
     def get(self, request, slug, project_id, pk=None):
         if pk:
             issue = Issue.issue_objects.annotate(
-                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+                sub_issues_count=Issue.issue_objects.filter(
+                    parent=OuterRef("id")
+                )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
@@ -102,7 +107,13 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):

         # Custom ordering for priority and state
         priority_order = ["urgent", "high", "medium", "low", "none"]
-        state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+        state_order = [
+            "backlog",
+            "unstarted",
+            "started",
+            "completed",
+            "cancelled",
+        ]

         order_by_param = request.GET.get("order_by", "-created_at")

@@ -117,7 +128,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
                 .values("count")
             )
             .annotate(
-                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+                attachment_count=IssueAttachment.objects.filter(
+                    issue=OuterRef("id")
+                )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
@@ -127,7 +140,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
         # Priority Ordering
         if order_by_param == "priority" or order_by_param == "-priority":
             priority_order = (
-                priority_order if order_by_param == "priority" else priority_order[::-1]
+                priority_order
+                if order_by_param == "priority"
+                else priority_order[::-1]
             )
             issue_queryset = issue_queryset.annotate(
                 priority_order=Case(
@@ -175,7 +190,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
                     else order_by_param
                 )
             ).order_by(
-                "-max_values" if order_by_param.startswith("-") else "max_values"
+                "-max_values"
+                if order_by_param.startswith("-")
+                else "max_values"
             )
         else:
             issue_queryset = issue_queryset.order_by(order_by_param)
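
For context on the ordering code these hunks only reflow: sorting by a business priority works by annotating each row with its index in priority_order and ordering on that annotation. A self-contained sketch of the pattern, not a copy of the code above:

    from django.db.models import Case, IntegerField, Value, When

    priority_order = ["urgent", "high", "medium", "low", "none"]
    ranked = issue_queryset.annotate(
        priority_rank=Case(
            # Map each priority string to its position in the list.
            *[
                When(priority=p, then=Value(i))
                for i, p in enumerate(priority_order)
            ],
            output_field=IntegerField(),
        )
    ).order_by("priority_rank")
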
@@ -204,12 +221,38 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
         )

         if serializer.is_valid():
+            if (
+                request.data.get("external_id")
+                and request.data.get("external_source")
+                and Issue.objects.filter(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_source=request.data.get("external_source"),
+                    external_id=request.data.get("external_id"),
+                ).exists()
+            ):
+                issue = Issue.objects.filter(
+                    workspace__slug=slug,
+                    project_id=project_id,
+                    external_id=request.data.get("external_id"),
+                    external_source=request.data.get("external_source"),
+                ).first()
+                return Response(
+                    {
+                        "error": "Issue with the same external id and external source already exists",
+                        "id": str(issue.id),
+                    },
+                    status=status.HTTP_409_CONFLICT,
+                )
+
             serializer.save()

             # Track the issue
             issue_activity.delay(
                 type="issue.activity.created",
-                requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+                requested_data=json.dumps(
+                    self.request.data, cls=DjangoJSONEncoder
+                ),
                 actor_id=str(request.user.id),
                 issue_id=str(serializer.data.get("id", None)),
                 project_id=str(project_id),
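
The new guard makes issue creation effectively idempotent for importers: posting the same external_id/external_source pair again returns 409 Conflict carrying the id of the existing issue instead of creating a duplicate. A hedged client-side sketch — issues_url, payload and headers are assumed, since the route itself is not part of this diff:

    import requests

    resp = requests.post(issues_url, json=payload, headers=headers)
    if resp.status_code == 409:
        issue_id = resp.json()["id"]  # reuse the issue that already exists
    else:
        resp.raise_for_status()
        issue_id = resp.json()["id"]  # freshly created issue
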
@@ -220,13 +263,44 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def patch(self, request, slug, project_id, pk=None):
-        issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+        issue = Issue.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=pk
+        )
+        project = Project.objects.get(pk=project_id)
         current_instance = json.dumps(
             IssueSerializer(issue).data, cls=DjangoJSONEncoder
         )
         requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
-        serializer = IssueSerializer(issue, data=request.data, partial=True)
+        serializer = IssueSerializer(
+            issue,
+            data=request.data,
+            context={
+                "project_id": project_id,
+                "workspace_id": project.workspace_id,
+            },
+            partial=True,
+        )
         if serializer.is_valid():
+            if (
+                str(request.data.get("external_id"))
+                and (issue.external_id != str(request.data.get("external_id")))
+                and Issue.objects.filter(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_source=request.data.get(
+                        "external_source", issue.external_source
+                    ),
+                    external_id=request.data.get("external_id"),
+                ).exists()
+            ):
+                return Response(
+                    {
+                        "error": "Issue with the same external id and external source already exists",
+                        "id": str(issue.id),
+                    },
+                    status=status.HTTP_409_CONFLICT,
+                )
+
             serializer.save()
             issue_activity.delay(
                 type="issue.activity.updated",
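
One caveat in the guard above: str(request.data.get("external_id")) is always a non-empty string (str(None) is "None"), so the first clause never short-circuits; the inequality and the exists() query do the real filtering. A stricter version would test the raw value first — a sketch of the alternative, not what the commit ships:

    external_id = request.data.get("external_id")
    if (
        external_id is not None
        and issue.external_id != str(external_id)
        and Issue.objects.filter(
            project_id=project_id,
            workspace__slug=slug,
            external_source=request.data.get(
                "external_source", issue.external_source
            ),
            external_id=external_id,
        ).exists()
    ):
        ...  # return the 409 response as above
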
@@ -241,7 +315,9 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def delete(self, request, slug, project_id, pk=None):
-        issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+        issue = Issue.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=pk
+        )
         current_instance = json.dumps(
             IssueSerializer(issue).data, cls=DjangoJSONEncoder
         )
@@ -275,7 +351,11 @@ class LabelAPIEndpoint(BaseAPIView):
         return (
             Label.objects.filter(workspace__slug=self.kwargs.get("slug"))
             .filter(project_id=self.kwargs.get("project_id"))
-            .filter(project__project_projectmember__member=self.request.user)
+            .filter(
+                project__project_projectmember__member=self.request.user,
+                project__project_projectmember__is_active=True,
+            )
+            .filter(project__archived_at__isnull=True)
             .select_related("project")
             .select_related("workspace")
             .select_related("parent")
@@ -287,13 +367,49 @@ class LabelAPIEndpoint(BaseAPIView):
         try:
             serializer = LabelSerializer(data=request.data)
             if serializer.is_valid():
+                if (
+                    request.data.get("external_id")
+                    and request.data.get("external_source")
+                    and Label.objects.filter(
+                        project_id=project_id,
+                        workspace__slug=slug,
+                        external_source=request.data.get("external_source"),
+                        external_id=request.data.get("external_id"),
+                    ).exists()
+                ):
+                    label = Label.objects.filter(
+                        workspace__slug=slug,
+                        project_id=project_id,
+                        external_id=request.data.get("external_id"),
+                        external_source=request.data.get("external_source"),
+                    ).first()
+                    return Response(
+                        {
+                            "error": "Label with the same external id and external source already exists",
+                            "id": str(label.id),
+                        },
+                        status=status.HTTP_409_CONFLICT,
+                    )
+
                 serializer.save(project_id=project_id)
-                return Response(serializer.data, status=status.HTTP_201_CREATED)
-            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-        except IntegrityError:
-            return Response(
-                {"error": "Label with the same name already exists in the project"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
+                return Response(
+                    serializer.data, status=status.HTTP_201_CREATED
+                )
+            return Response(
+                serializer.errors, status=status.HTTP_400_BAD_REQUEST
+            )
+        except IntegrityError:
+            label = Label.objects.filter(
+                workspace__slug=slug,
+                project_id=project_id,
+                name=request.data.get("name"),
+            ).first()
+            return Response(
+                {
+                    "error": "Label with the same name already exists in the project",
+                    "id": str(label.id),
+                },
+                status=status.HTTP_409_CONFLICT,
+            )

     def get(self, request, slug, project_id, pk=None):
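
Both duplicate paths in the label POST handler now answer 409 Conflict and include the id of the existing label, so name collisions and external-id collisions can be handled uniformly by a client. A hypothetical caller-side helper, not part of the diff:

    def get_or_create_label(session, labels_url, payload):
        # 201 on create; 409 carries the id of the label that already exists.
        resp = session.post(labels_url, json=payload)
        if resp.status_code == 409:
            return resp.json()["id"], False
        resp.raise_for_status()
        return resp.json()["id"], True
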
@@ -309,18 +425,40 @@ class LabelAPIEndpoint(BaseAPIView):
                 ).data,
             )
         label = self.get_queryset().get(pk=pk)
-        serializer = LabelSerializer(label, fields=self.fields, expand=self.expand,)
+        serializer = LabelSerializer(
+            label,
+            fields=self.fields,
+            expand=self.expand,
+        )
         return Response(serializer.data, status=status.HTTP_200_OK)

     def patch(self, request, slug, project_id, pk=None):
         label = self.get_queryset().get(pk=pk)
         serializer = LabelSerializer(label, data=request.data, partial=True)
         if serializer.is_valid():
+            if (
+                str(request.data.get("external_id"))
+                and (label.external_id != str(request.data.get("external_id")))
+                and Issue.objects.filter(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_source=request.data.get(
+                        "external_source", label.external_source
+                    ),
+                    external_id=request.data.get("external_id"),
+                ).exists()
+            ):
+                return Response(
+                    {
+                        "error": "Label with the same external id and external source already exists",
+                        "id": str(label.id),
+                    },
+                    status=status.HTTP_409_CONFLICT,
+                )
             serializer.save()
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


     def delete(self, request, slug, project_id, pk=None):
         label = self.get_queryset().get(pk=pk)
         label.delete()
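
Worth flagging in the PATCH hunk above: the duplicate check filters Issue.objects rather than Label.objects, so it compares the incoming external id against issues, not labels, and it shares the always-truthy str(...) guard noted earlier. If that is unintended, the corrected query is a one-word change — a sketch, not what this commit ships:

    duplicate = Label.objects.filter(
        project_id=project_id,
        workspace__slug=slug,
        external_source=request.data.get(
            "external_source", label.external_source
        ),
        external_id=request.data.get("external_id"),
    ).exists()
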
@@ -346,7 +484,11 @@ class IssueLinkAPIEndpoint(BaseAPIView):
             IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
             .filter(project_id=self.kwargs.get("project_id"))
             .filter(issue_id=self.kwargs.get("issue_id"))
-            .filter(project__project_projectmember__member=self.request.user)
+            .filter(
+                project__project_projectmember__member=self.request.user,
+                project__project_projectmember__is_active=True,
+            )
+            .filter(project__archived_at__isnull=True)
             .order_by(self.kwargs.get("order_by", "-created_at"))
             .distinct()
         )
@@ -386,7 +528,9 @@ class IssueLinkAPIEndpoint(BaseAPIView):
             )
             issue_activity.delay(
                 type="link.activity.created",
-                requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+                requested_data=json.dumps(
+                    serializer.data, cls=DjangoJSONEncoder
+                ),
                 actor_id=str(self.request.user.id),
                 issue_id=str(self.kwargs.get("issue_id")),
                 project_id=str(self.kwargs.get("project_id")),
@@ -398,14 +542,19 @@ class IssueLinkAPIEndpoint(BaseAPIView):

     def patch(self, request, slug, project_id, issue_id, pk):
         issue_link = IssueLink.objects.get(
-            workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+            workspace__slug=slug,
+            project_id=project_id,
+            issue_id=issue_id,
+            pk=pk,
         )
         requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
         current_instance = json.dumps(
             IssueLinkSerializer(issue_link).data,
             cls=DjangoJSONEncoder,
         )
-        serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
+        serializer = IssueLinkSerializer(
+            issue_link, data=request.data, partial=True
+        )
         if serializer.is_valid():
             serializer.save()
             issue_activity.delay(
@@ -422,7 +571,10 @@ class IssueLinkAPIEndpoint(BaseAPIView):

     def delete(self, request, slug, project_id, issue_id, pk):
         issue_link = IssueLink.objects.get(
-            workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+            workspace__slug=slug,
+            project_id=project_id,
+            issue_id=issue_id,
+            pk=pk,
         )
         current_instance = json.dumps(
             IssueLinkSerializer(issue_link).data,
@@ -457,14 +609,17 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):

     def get_queryset(self):
         return (
-            IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug"))
+            IssueComment.objects.filter(
+                workspace__slug=self.kwargs.get("slug")
+            )
             .filter(project_id=self.kwargs.get("project_id"))
             .filter(issue_id=self.kwargs.get("issue_id"))
-            .filter(project__project_projectmember__member=self.request.user)
-            .select_related("project")
-            .select_related("workspace")
-            .select_related("issue")
-            .select_related("actor")
+            .filter(
+                project__project_projectmember__member=self.request.user,
+                project__project_projectmember__is_active=True,
+            )
+            .filter(project__archived_at__isnull=True)
+            .select_related("workspace", "project", "issue", "actor")
             .annotate(
                 is_member=Exists(
                     ProjectMember.objects.filter(
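
These queryset hunks repeat one pattern across every project-scoped endpoint: membership must now be active (project__project_projectmember__is_active=True) and the project itself must not be archived (project__archived_at__isnull=True). Factored into a reusable mixin it could look like this — a sketch, not code from the diff:

    class ActiveProjectQuerysetMixin:
        """Scope a project-bound queryset to live projects the
        requesting user is still an active member of."""

        def scope_to_active_project(self, queryset):
            return queryset.filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
                project__archived_at__isnull=True,
            )
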
@@ -500,6 +655,31 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
         )

     def post(self, request, slug, project_id, issue_id):
+        # Validation check if the issue already exists
+        if (
+            request.data.get("external_id")
+            and request.data.get("external_source")
+            and IssueComment.objects.filter(
+                project_id=project_id,
+                workspace__slug=slug,
+                external_source=request.data.get("external_source"),
+                external_id=request.data.get("external_id"),
+            ).exists()
+        ):
+            issue_comment = IssueComment.objects.filter(
+                workspace__slug=slug,
+                project_id=project_id,
+                external_id=request.data.get("external_id"),
+                external_source=request.data.get("external_source"),
+            ).first()
+            return Response(
+                {
+                    "error": "Issue Comment with the same external id and external source already exists",
+                    "id": str(issue_comment.id),
+                },
+                status=status.HTTP_409_CONFLICT,
+            )
+
         serializer = IssueCommentSerializer(data=request.data)
         if serializer.is_valid():
             serializer.save(
@@ -509,7 +689,9 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
             )
             issue_activity.delay(
                 type="comment.activity.created",
-                requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+                requested_data=json.dumps(
+                    serializer.data, cls=DjangoJSONEncoder
+                ),
                 actor_id=str(self.request.user.id),
                 issue_id=str(self.kwargs.get("issue_id")),
                 project_id=str(self.kwargs.get("project_id")),
@@ -521,13 +703,41 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):

     def patch(self, request, slug, project_id, issue_id, pk):
         issue_comment = IssueComment.objects.get(
-            workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+            workspace__slug=slug,
+            project_id=project_id,
+            issue_id=issue_id,
+            pk=pk,
         )
         requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
         current_instance = json.dumps(
             IssueCommentSerializer(issue_comment).data,
             cls=DjangoJSONEncoder,
         )
+
+        # Validation check if the issue already exists
+        if (
+            request.data.get("external_id")
+            and (
+                issue_comment.external_id
+                != str(request.data.get("external_id"))
+            )
+            and IssueComment.objects.filter(
+                project_id=project_id,
+                workspace__slug=slug,
+                external_source=request.data.get(
+                    "external_source", issue_comment.external_source
+                ),
+                external_id=request.data.get("external_id"),
+            ).exists()
+        ):
+            return Response(
+                {
+                    "error": "Issue Comment with the same external id and external source already exists",
+                    "id": str(issue_comment.id),
+                },
+                status=status.HTTP_409_CONFLICT,
+            )
+
         serializer = IssueCommentSerializer(
             issue_comment, data=request.data, partial=True
         )
@@ -547,7 +757,10 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):

     def delete(self, request, slug, project_id, issue_id, pk):
         issue_comment = IssueComment.objects.get(
-            workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+            workspace__slug=slug,
+            project_id=project_id,
+            issue_id=issue_id,
+            pk=pk,
         )
         current_instance = json.dumps(
             IssueCommentSerializer(issue_comment).data,
@@ -579,7 +792,9 @@ class IssueActivityAPIEndpoint(BaseAPIView):
             .filter(
                 ~Q(field__in=["comment", "vote", "reaction", "draft"]),
                 project__project_projectmember__member=self.request.user,
+                project__project_projectmember__is_active=True,
             )
+            .filter(project__archived_at__isnull=True)
             .select_related("actor", "workspace", "issue", "project")
         ).order_by(request.GET.get("order_by", "created_at"))

@@ -2,32 +2,33 @@
 import json

 # Django imports
-from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
-from django.utils import timezone
 from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Prefetch, Q
+from django.utils import timezone

 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response

 # Module imports
-from .base import BaseAPIView, WebhookMixin
+from plane.api.serializers import (
+    IssueSerializer,
+    ModuleIssueSerializer,
+    ModuleSerializer,
+)
 from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activites_task import issue_activity
 from plane.db.models import (
-    Project,
-    Module,
-    ModuleLink,
     Issue,
-    ModuleIssue,
     IssueAttachment,
     IssueLink,
+    Module,
+    ModuleIssue,
+    ModuleLink,
+    Project,
 )
-from plane.api.serializers import (
-    ModuleSerializer,
-    ModuleIssueSerializer,
-    IssueSerializer,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
+
+from .base import BaseAPIView, WebhookMixin


 class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -55,7 +56,9 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
             .prefetch_related(
                 Prefetch(
                     "link_module",
-                    queryset=ModuleLink.objects.select_related("module", "created_by"),
+                    queryset=ModuleLink.objects.select_related(
+                        "module", "created_by"
+                    ),
                 )
             )
             .annotate(
@@ -65,6 +68,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 ),
             )
             .annotate(
@@ -75,6 +79,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
@@ -85,6 +90,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
@@ -95,6 +101,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
@@ -105,6 +112,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
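
Every per-group Count() in the module queryset gains distinct=True. Without it, stacking several annotations over the same issue_module join multiplies rows, so the counts come back inflated; distinct=True makes each annotation count unique rows only. A compact illustration of the pattern:

    from django.db.models import Count, Q

    modules = Module.objects.annotate(
        total_issues=Count("issue_module", distinct=True),
        completed_issues=Count(
            "issue_module__issue__state__group",
            filter=Q(issue_module__issue__state__group="completed"),
            distinct=True,
        ),
    )
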
@@ -115,15 +123,45 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .order_by(self.kwargs.get("order_by", "-created_at"))
         )

     def post(self, request, slug, project_id):
-        project = Project.objects.get(workspace__slug=slug, pk=project_id)
-        serializer = ModuleSerializer(data=request.data, context={"project": project})
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+        serializer = ModuleSerializer(
+            data=request.data,
+            context={
+                "project_id": project_id,
+                "workspace_id": project.workspace_id,
+            },
+        )
         if serializer.is_valid():
+            if (
+                request.data.get("external_id")
+                and request.data.get("external_source")
+                and Module.objects.filter(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_source=request.data.get("external_source"),
+                    external_id=request.data.get("external_id"),
+                ).exists()
+            ):
+                module = Module.objects.filter(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_source=request.data.get("external_source"),
+                    external_id=request.data.get("external_id"),
+                ).first()
+                return Response(
+                    {
+                        "error": "Module with the same external id and external source already exists",
+                        "id": str(module.id),
+                    },
+                    status=status.HTTP_409_CONFLICT,
+                )
             serializer.save()
             module = Module.objects.get(pk=serializer.data["id"])
             serializer = ModuleSerializer(module)
@@ -131,16 +169,49 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def patch(self, request, slug, project_id, pk):
-        module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug)
-        serializer = ModuleSerializer(module, data=request.data)
+        module = Module.objects.get(
+            pk=pk, project_id=project_id, workspace__slug=slug
+        )
+        if module.archived_at:
+            return Response(
+                {"error": "Archived module cannot be edited"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        serializer = ModuleSerializer(
+            module,
+            data=request.data,
+            context={"project_id": project_id},
+            partial=True,
+        )
         if serializer.is_valid():
+            if (
+                request.data.get("external_id")
+                and (module.external_id != request.data.get("external_id"))
+                and Module.objects.filter(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_source=request.data.get(
+                        "external_source", module.external_source
+                    ),
+                    external_id=request.data.get("external_id"),
+                ).exists()
+            ):
+                return Response(
+                    {
+                        "error": "Module with the same external id and external source already exists",
+                        "id": str(module.id),
+                    },
+                    status=status.HTTP_409_CONFLICT,
+                )
             serializer.save()
-            return Response(serializer.data, status=status.HTTP_201_CREATED)
+            return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def get(self, request, slug, project_id, pk=None):
         if pk:
-            queryset = self.get_queryset().get(pk=pk)
+            queryset = (
+                self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+            )
             data = ModuleSerializer(
                 queryset,
                 fields=self.fields,
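
The PATCH handler now rejects edits to archived modules, switches the serializer to partial=True, and returns 200 OK for an update instead of the previous 201 Created. A minimal regression-test sketch for the archived guard; api_client, module and module_url are assumed test fixtures:

    from django.utils import timezone

    def test_patch_archived_module_is_rejected(api_client, module, module_url):
        module.archived_at = timezone.now()
        module.save()
        resp = api_client.patch(module_url, {"name": "renamed"}, format="json")
        assert resp.status_code == 400
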
@@ -152,7 +223,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
             )
         return self.paginate(
             request=request,
-            queryset=(self.get_queryset()),
+            queryset=(self.get_queryset().filter(archived_at__isnull=True)),
             on_results=lambda modules: ModuleSerializer(
                 modules,
                 many=True,
@@ -162,9 +233,13 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
         )

     def delete(self, request, slug, project_id, pk):
-        module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+        module = Module.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=pk
+        )
         module_issues = list(
-            ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
+            ModuleIssue.objects.filter(module_id=pk).values_list(
+                "issue", flat=True
+            )
         )
         issue_activity.delay(
             type="module.activity.deleted",
@@ -204,7 +279,9 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
     def get_queryset(self):
         return (
             ModuleIssue.objects.annotate(
-                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
+                sub_issues_count=Issue.issue_objects.filter(
+                    parent=OuterRef("issue")
+                )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
@@ -212,7 +289,11 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
             .filter(workspace__slug=self.kwargs.get("slug"))
             .filter(project_id=self.kwargs.get("project_id"))
             .filter(module_id=self.kwargs.get("module_id"))
-            .filter(project__project_projectmember__member=self.request.user)
+            .filter(
+                project__project_projectmember__member=self.request.user,
+                project__project_projectmember__is_active=True,
+            )
+            .filter(project__archived_at__isnull=True)
             .select_related("project")
             .select_related("workspace")
             .select_related("module")
@@ -228,7 +309,9 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
         issues = (
             Issue.issue_objects.filter(issue_module__module_id=module_id)
             .annotate(
-                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+                sub_issues_count=Issue.issue_objects.filter(
+                    parent=OuterRef("id")
+                )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
@@ -250,7 +333,9 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
                 .values("count")
             )
             .annotate(
-                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+                attachment_count=IssueAttachment.objects.filter(
+                    issue=OuterRef("id")
+                )
                 .order_by()
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
@@ -271,7 +356,8 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
         issues = request.data.get("issues", [])
         if not len(issues):
             return Response(
-                {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
+                {"error": "Issues are required"},
+                status=status.HTTP_400_BAD_REQUEST,
             )
         module = Module.objects.get(
             workspace__slug=slug, project_id=project_id, pk=module_id
@@ -354,7 +440,10 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):

     def delete(self, request, slug, project_id, module_id, issue_id):
         module_issue = ModuleIssue.objects.get(
-            workspace__slug=slug, project_id=project_id, module_id=module_id, issue_id=issue_id
+            workspace__slug=slug,
+            project_id=project_id,
+            module_id=module_id,
+            issue_id=issue_id,
         )
         module_issue.delete()
         issue_activity.delay(
@@ -372,3 +461,123 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
             epoch=int(timezone.now().timestamp()),
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
+
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+
+    def get_queryset(self):
+        return (
+            Module.objects.filter(project_id=self.kwargs.get("project_id"))
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(archived_at__isnull=False)
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("lead")
+            .prefetch_related("members")
+            .prefetch_related(
+                Prefetch(
+                    "link_module",
+                    queryset=ModuleLink.objects.select_related(
+                        "module", "created_by"
+                    ),
+                )
+            )
+            .annotate(
+                total_issues=Count(
+                    "issue_module",
+                    filter=Q(
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                ),
+            )
+            .annotate(
+                completed_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="completed",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                cancelled_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="cancelled",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                started_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="started",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                unstarted_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="unstarted",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                backlog_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="backlog",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .order_by(self.kwargs.get("order_by", "-created_at"))
+        )
+
+    def get(self, request, slug, project_id):
+        return self.paginate(
+            request=request,
+            queryset=(self.get_queryset()),
+            on_results=lambda modules: ModuleSerializer(
+                modules,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+            ).data,
+        )
+
+    def post(self, request, slug, project_id, pk):
+        module = Module.objects.get(
+            pk=pk, project_id=project_id, workspace__slug=slug
+        )
+        module.archived_at = timezone.now()
+        module.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def delete(self, request, slug, project_id, pk):
+        module = Module.objects.get(
+            pk=pk, project_id=project_id, workspace__slug=slug
+        )
+        module.archived_at = None
+        module.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
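
The new endpoint archives a module with POST (stamping archived_at) and unarchives with DELETE (clearing it); both return 204 No Content, and its GET lists only archived modules. A hedged usage sketch — archive_url and auth_headers stand in for the route and credentials, which this diff does not show:

    import requests

    requests.post(archive_url, headers=auth_headers)    # archive: archived_at = now
    requests.delete(archive_url, headers=auth_headers)  # unarchive: archived_at = None
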
@@ -1,27 +1,29 @@
 # Django imports
 from django.db import IntegrityError
-from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
+from django.db.models import Exists, F, Func, OuterRef, Prefetch, Q, Subquery
+from django.utils import timezone

 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
 from rest_framework.serializers import ValidationError

+from plane.api.serializers import ProjectSerializer
+from plane.app.permissions import ProjectBasePermission
+
 # Module imports
 from plane.db.models import (
-    Workspace,
-    Project,
-    ProjectFavorite,
-    ProjectMember,
-    ProjectDeployBoard,
-    State,
     Cycle,
-    Module,
-    IssueProperty,
     Inbox,
+    IssueProperty,
+    Module,
+    Project,
+    ProjectDeployBoard,
+    ProjectFavorite,
+    ProjectMember,
+    State,
+    Workspace,
 )
-from plane.app.permissions import ProjectBasePermission
-from plane.api.serializers import ProjectSerializer
 from .base import BaseAPIView, WebhookMixin
@@ -39,9 +41,18 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
     def get_queryset(self):
         return (
             Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
-            .filter(Q(project_projectmember__member=self.request.user) | Q(network=2))
+            .filter(
+                Q(
+                    project_projectmember__member=self.request.user,
+                    project_projectmember__is_active=True,
+                )
+                | Q(network=2)
+            )
             .select_related(
-                "workspace", "workspace__owner", "default_assignee", "project_lead"
+                "workspace",
+                "workspace__owner",
+                "default_assignee",
+                "project_lead",
             )
             .annotate(
                 is_member=Exists(
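
Note that the membership condition and the new is_active flag sit inside a single Q object before being OR-ed with public projects (network=2). That matters on a to-many relation: one Q requires the same ProjectMember row to satisfy both conditions, whereas two separate filter() calls could match them against different rows. Illustration:

    # One Q object: the SAME membership row must match both conditions.
    Project.objects.filter(
        Q(
            project_projectmember__member=user,
            project_projectmember__is_active=True,
        )
        | Q(network=2)
    )
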
@@ -94,8 +105,8 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
             .distinct()
         )

-    def get(self, request, slug, project_id=None):
-        if project_id is None:
+    def get(self, request, slug, pk=None):
+        if pk is None:
             sort_order_query = ProjectMember.objects.filter(
                 member=request.user,
                 project_id=OuterRef("pk"),
@@ -120,11 +131,18 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 request=request,
                 queryset=(projects),
                 on_results=lambda projects: ProjectSerializer(
-                    projects, many=True, fields=self.fields, expand=self.expand,
+                    projects,
+                    many=True,
+                    fields=self.fields,
+                    expand=self.expand,
                 ).data,
             )
-        project = self.get_queryset().get(workspace__slug=slug, pk=project_id)
-        serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand,)
+        project = self.get_queryset().get(workspace__slug=slug, pk=pk)
+        serializer = ProjectSerializer(
+            project,
+            fields=self.fields,
+            expand=self.expand,
+        )
         return Response(serializer.data, status=status.HTTP_200_OK)

     def post(self, request, slug):
@@ -137,8 +155,10 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 serializer.save()

                 # Add the user as Administrator to the project
-                project_member = ProjectMember.objects.create(
-                    project_id=serializer.data["id"], member=request.user, role=20
+                _ = ProjectMember.objects.create(
+                    project_id=serializer.data["id"],
+                    member=request.user,
+                    role=20,
                 )
                 # Also create the issue property for the user
                 _ = IssueProperty.objects.create(
@@ -211,9 +231,15 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                     ]
                 )

-                project = self.get_queryset().filter(pk=serializer.data["id"]).first()
+                project = (
+                    self.get_queryset()
+                    .filter(pk=serializer.data["id"])
+                    .first()
+                )
                 serializer = ProjectSerializer(project)
-                return Response(serializer.data, status=status.HTTP_201_CREATED)
+                return Response(
+                    serializer.data, status=status.HTTP_201_CREATED
+                )
             return Response(
                 serializer.errors,
                 status=status.HTTP_400_BAD_REQUEST,
@@ -224,20 +250,27 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 {"name": "The project name is already taken"},
                 status=status.HTTP_410_GONE,
             )
-        except Workspace.DoesNotExist as e:
+        except Workspace.DoesNotExist:
             return Response(
-                {"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
+                {"error": "Workspace does not exist"},
+                status=status.HTTP_404_NOT_FOUND,
             )
-        except ValidationError as e:
+        except ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
                 status=status.HTTP_410_GONE,
             )

-    def patch(self, request, slug, project_id=None):
+    def patch(self, request, slug, pk):
         try:
             workspace = Workspace.objects.get(slug=slug)
-            project = Project.objects.get(pk=project_id)
+            project = Project.objects.get(pk=pk)
+
+            if project.archived_at:
+                return Response(
+                    {"error": "Archived project cannot be updated"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
             serializer = ProjectSerializer(
                 project,
@ -250,22 +283,31 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
serializer.save()
|
serializer.save()
|
||||||
if serializer.data["inbox_view"]:
|
if serializer.data["inbox_view"]:
|
||||||
Inbox.objects.get_or_create(
|
Inbox.objects.get_or_create(
|
||||||
name=f"{project.name} Inbox", project=project, is_default=True
|
name=f"{project.name} Inbox",
|
||||||
|
project=project,
|
||||||
|
is_default=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create the triage state in Backlog group
|
# Create the triage state in Backlog group
|
||||||
State.objects.get_or_create(
|
State.objects.get_or_create(
|
||||||
name="Triage",
|
name="Triage",
|
||||||
group="backlog",
|
group="triage",
|
||||||
description="Default state for managing all Inbox Issues",
|
description="Default state for managing all Inbox Issues",
|
||||||
project_id=project_id,
|
project_id=pk,
|
||||||
color="#ff7700",
|
color="#ff7700",
|
||||||
|
is_triage=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
project = self.get_queryset().filter(pk=serializer.data["id"]).first()
|
project = (
|
||||||
|
self.get_queryset()
|
||||||
|
.filter(pk=serializer.data["id"])
|
||||||
|
.first()
|
||||||
|
)
|
||||||
serializer = ProjectSerializer(project)
|
serializer = ProjectSerializer(project)
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(
|
||||||
|
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
except IntegrityError as e:
|
except IntegrityError as e:
|
||||||
if "already exists" in str(e):
|
if "already exists" in str(e):
|
||||||
return Response(
|
return Response(
|
||||||
@ -274,15 +316,35 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
)
|
)
|
||||||
except (Project.DoesNotExist, Workspace.DoesNotExist):
|
except (Project.DoesNotExist, Workspace.DoesNotExist):
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
|
{"error": "Project does not exist"},
|
||||||
|
status=status.HTTP_404_NOT_FOUND,
|
||||||
)
|
)
|
||||||
except ValidationError as e:
|
except ValidationError:
|
||||||
return Response(
|
return Response(
|
||||||
{"identifier": "The project identifier is already taken"},
|
{"identifier": "The project identifier is already taken"},
|
||||||
status=status.HTTP_410_GONE,
|
status=status.HTTP_410_GONE,
|
||||||
)
|
)
|
||||||
|
|
||||||
def delete(self, request, slug, project_id):
|
def delete(self, request, slug, pk):
|
||||||
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
project = Project.objects.get(pk=pk, workspace__slug=slug)
|
||||||
project.delete()
|
project.delete()
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||||
|
|
||||||
|
permission_classes = [
|
||||||
|
ProjectBasePermission,
|
||||||
|
]
|
||||||
|
|
||||||
|
def post(self, request, slug, project_id):
|
||||||
|
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||||
|
project.archived_at = timezone.now()
|
||||||
|
project.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
def delete(self, request, slug, project_id):
|
||||||
|
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||||
|
project.archived_at = None
|
||||||
|
project.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
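The new endpoint archives by stamping archived_at and unarchives by clearing it; both return 204. A minimal client-side sketch follows, with the URL pattern assumed (the route is registered elsewhere, outside this diff):

# Sketch only: the route below is an assumption, not shown in this diff.
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin_user)  # hypothetical admin fixture

base = "/api/workspaces/my-workspace/projects/<project_id>/archive/"
client.post(base)    # sets project.archived_at = timezone.now()
client.delete(base)  # resets project.archived_at to None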
@@ -1,18 +1,16 @@
-# Python imports
-from itertools import groupby
-
 # Django imports
-from django.db.models import Q
+from django.db import IntegrityError

 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response
+
+from plane.api.serializers import StateSerializer
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import Issue, State

 # Module imports
 from .base import BaseAPIView
-from plane.api.serializers import StateSerializer
-from plane.app.permissions import ProjectEntityPermission
-from plane.db.models import State, Issue


 class StateAPIEndpoint(BaseAPIView):
@@ -26,23 +24,73 @@ class StateAPIEndpoint(BaseAPIView):
         return (
             State.objects.filter(workspace__slug=self.kwargs.get("slug"))
             .filter(project_id=self.kwargs.get("project_id"))
-            .filter(project__project_projectmember__member=self.request.user)
-            .filter(~Q(name="Triage"))
+            .filter(
+                project__project_projectmember__member=self.request.user,
+                project__project_projectmember__is_active=True,
+            )
+            .filter(is_triage=False)
+            .filter(project__archived_at__isnull=True)
             .select_related("project")
             .select_related("workspace")
             .distinct()
         )

     def post(self, request, slug, project_id):
-        serializer = StateSerializer(data=request.data, context={"project_id": project_id})
-        if serializer.is_valid():
-            serializer.save(project_id=project_id)
-            return Response(serializer.data, status=status.HTTP_200_OK)
-        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        try:
+            serializer = StateSerializer(
+                data=request.data, context={"project_id": project_id}
+            )
+            if serializer.is_valid():
+                if (
+                    request.data.get("external_id")
+                    and request.data.get("external_source")
+                    and State.objects.filter(
+                        project_id=project_id,
+                        workspace__slug=slug,
+                        external_source=request.data.get("external_source"),
+                        external_id=request.data.get("external_id"),
+                    ).exists()
+                ):
+                    state = State.objects.filter(
+                        workspace__slug=slug,
+                        project_id=project_id,
+                        external_id=request.data.get("external_id"),
+                        external_source=request.data.get("external_source"),
+                    ).first()
+                    return Response(
+                        {
+                            "error": "State with the same external id and external source already exists",
+                            "id": str(state.id),
+                        },
+                        status=status.HTTP_409_CONFLICT,
+                    )
+
+                serializer.save(project_id=project_id)
+                return Response(serializer.data, status=status.HTTP_200_OK)
+            return Response(
+                serializer.errors, status=status.HTTP_400_BAD_REQUEST
+            )
+        except IntegrityError:
+            state = State.objects.filter(
+                workspace__slug=slug,
+                project_id=project_id,
+                name=request.data.get("name"),
+            ).first()
+            return Response(
+                {
+                    "error": "State with the same name already exists in the project",
+                    "id": str(state.id),
+                },
+                status=status.HTTP_409_CONFLICT,
+            )

     def get(self, request, slug, project_id, state_id=None):
         if state_id:
-            serializer = StateSerializer(self.get_queryset().get(pk=state_id))
+            serializer = StateSerializer(
+                self.get_queryset().get(pk=state_id),
+                fields=self.fields,
+                expand=self.expand,
+            )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return self.paginate(
             request=request,
@@ -57,21 +105,26 @@ class StateAPIEndpoint(BaseAPIView):

     def delete(self, request, slug, project_id, state_id):
         state = State.objects.get(
-            ~Q(name="Triage"),
+            is_triage=False,
             pk=state_id,
             project_id=project_id,
             workspace__slug=slug,
         )

         if state.default:
-            return Response({"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST)
+            return Response(
+                {"error": "Default state cannot be deleted"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

         # Check for any issues in the state
         issue_exist = Issue.issue_objects.filter(state=state_id).exists()

         if issue_exist:
             return Response(
-                {"error": "The state is not empty, only empty states can be deleted"},
+                {
+                    "error": "The state is not empty, only empty states can be deleted"
+                },
                 status=status.HTTP_400_BAD_REQUEST,
             )
@@ -79,9 +132,30 @@ class StateAPIEndpoint(BaseAPIView):
         return Response(status=status.HTTP_204_NO_CONTENT)

     def patch(self, request, slug, project_id, state_id=None):
-        state = State.objects.get(workspace__slug=slug, project_id=project_id, pk=state_id)
+        state = State.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=state_id
+        )
         serializer = StateSerializer(state, data=request.data, partial=True)
         if serializer.is_valid():
+            if (
+                str(request.data.get("external_id"))
+                and (state.external_id != str(request.data.get("external_id")))
+                and State.objects.filter(
+                    project_id=project_id,
+                    workspace__slug=slug,
+                    external_source=request.data.get(
+                        "external_source", state.external_source
+                    ),
+                    external_id=request.data.get("external_id"),
+                ).exists()
+            ):
+                return Response(
+                    {
+                        "error": "State with the same external id and external source already exists",
+                        "id": str(state.id),
+                    },
+                    status=status.HTTP_409_CONFLICT,
+                )
             serializer.save()
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
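Both the create and update paths above apply the same idempotency rule for synced states: if a row with the caller's external_source/external_id pair already exists in the project, the API answers 409 Conflict and hands back the existing id rather than creating a duplicate. The same guard, restated as a standalone helper:

# Restatement of the guard above; the helper name is ours, not the codebase's.
def find_duplicate_state(slug, project_id, payload):
    external_id = payload.get("external_id")
    external_source = payload.get("external_source")
    if not (external_id and external_source):
        return None  # nothing to deduplicate on
    return State.objects.filter(
        workspace__slug=slug,
        project_id=project_id,
        external_id=external_id,
        external_source=external_source,
    ).first()  # caller returns 409 with str(state.id) when this is not None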
@@ -25,7 +25,10 @@ class APIKeyAuthentication(authentication.BaseAuthentication):
     def validate_api_token(self, token):
         try:
             api_token = APIToken.objects.get(
-                Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+                Q(
+                    Q(expired_at__gt=timezone.now())
+                    | Q(expired_at__isnull=True)
+                ),
                 token=token,
                 is_active=True,
             )
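The reformatted Q expression preserves the original rule: a token authenticates only if it is active and either never expires or has an expiry in the future. In plain Python the same predicate reads:

# Plain-Python restatement of the ORM filter above (illustrative only).
from django.utils import timezone

def token_is_usable(api_token):
    unexpired = (
        api_token.expired_at is None
        or api_token.expired_at > timezone.now()
    )
    return api_token.is_active and unexpired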
@@ -1,4 +1,3 @@
-
 from .workspace import (
     WorkSpaceBasePermission,
     WorkspaceOwnerPermission,
@@ -13,5 +12,3 @@ from .project import (
     ProjectMemberPermission,
     ProjectLitePermission,
 )
-
-
@@ -1,8 +1,8 @@
 # Third Party imports
-from rest_framework.permissions import BasePermission, SAFE_METHODS
+from rest_framework.permissions import SAFE_METHODS, BasePermission

 # Module import
-from plane.db.models import WorkspaceMember, ProjectMember
+from plane.db.models import ProjectMember, WorkspaceMember

 # Permission Mappings
 Admin = 20
@@ -17,6 +17,7 @@ from .workspace import (
     WorkspaceThemeSerializer,
     WorkspaceMemberAdminSerializer,
     WorkspaceMemberMeSerializer,
+    WorkspaceUserPropertiesSerializer,
 )
 from .project import (
     ProjectSerializer,
@@ -31,14 +32,20 @@ from .project import (
     ProjectDeployBoardSerializer,
     ProjectMemberAdminSerializer,
     ProjectPublicMemberSerializer,
+    ProjectMemberRoleSerializer,
 )
 from .state import StateSerializer, StateLiteSerializer
-from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
+from .view import (
+    GlobalViewSerializer,
+    IssueViewSerializer,
+    IssueViewFavoriteSerializer,
+)
 from .cycle import (
     CycleSerializer,
     CycleIssueSerializer,
     CycleFavoriteSerializer,
     CycleWriteSerializer,
+    CycleUserPropertiesSerializer,
 )
 from .asset import FileAssetSerializer
 from .issue import (
@@ -52,6 +59,7 @@ from .issue import (
     IssueFlatSerializer,
     IssueStateSerializer,
     IssueLinkSerializer,
+    IssueInboxSerializer,
     IssueLiteSerializer,
     IssueAttachmentSerializer,
     IssueSubscriberSerializer,
@@ -61,44 +69,57 @@ from .issue import (
     IssueRelationSerializer,
     RelatedIssueSerializer,
     IssuePublicSerializer,
+    IssueDetailSerializer,
+    IssueReactionLiteSerializer,
+    IssueAttachmentLiteSerializer,
+    IssueLinkLiteSerializer,
 )

 from .module import (
+    ModuleDetailSerializer,
     ModuleWriteSerializer,
     ModuleSerializer,
     ModuleIssueSerializer,
     ModuleLinkSerializer,
     ModuleFavoriteSerializer,
+    ModuleUserPropertiesSerializer,
 )

 from .api import APITokenSerializer, APITokenReadSerializer

-from .integration import (
-    IntegrationSerializer,
-    WorkspaceIntegrationSerializer,
-    GithubIssueSyncSerializer,
-    GithubRepositorySerializer,
-    GithubRepositorySyncSerializer,
-    GithubCommentSyncSerializer,
-    SlackProjectSyncSerializer,
-)
-
 from .importer import ImporterSerializer

-from .page import PageSerializer, PageLogSerializer, SubPageSerializer, PageFavoriteSerializer
+from .page import (
+    PageSerializer,
+    PageLogSerializer,
+    SubPageSerializer,
+    PageFavoriteSerializer,
+)

 from .estimate import (
     EstimateSerializer,
     EstimatePointSerializer,
     EstimateReadSerializer,
+    WorkspaceEstimateSerializer,
 )

-from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
+from .inbox import (
+    InboxSerializer,
+    InboxIssueSerializer,
+    IssueStateInboxSerializer,
+    InboxIssueLiteSerializer,
+    InboxIssueDetailSerializer,
+)

 from .analytic import AnalyticViewSerializer

-from .notification import NotificationSerializer
+from .notification import (
+    NotificationSerializer,
+    UserNotificationPreferenceSerializer,
+)

 from .exporter import ExporterHistorySerializer

 from .webhook import WebhookSerializer, WebhookLogSerializer
+
+from .dashboard import DashboardSerializer, WidgetSerializer
@@ -3,7 +3,6 @@ from plane.db.models import APIToken, APIActivityLog


 class APITokenSerializer(BaseSerializer):
-
     class Meta:
         model = APIToken
         fields = "__all__"
@@ -18,14 +17,12 @@ class APITokenSerializer(BaseSerializer):


 class APITokenReadSerializer(BaseSerializer):
-
     class Meta:
         model = APIToken
-        exclude = ('token',)
+        exclude = ("token",)


 class APIActivityLogSerializer(BaseSerializer):
-
     class Meta:
         model = APIActivityLog
         fields = "__all__"
@@ -4,16 +4,17 @@ from rest_framework import serializers
 class BaseSerializer(serializers.ModelSerializer):
     id = serializers.PrimaryKeyRelatedField(read_only=True)

-class DynamicBaseSerializer(BaseSerializer):

+
+class DynamicBaseSerializer(BaseSerializer):
     def __init__(self, *args, **kwargs):
         # If 'fields' is provided in the arguments, remove it and store it separately.
         # This is done so as not to pass this custom argument up to the superclass.
-        fields = kwargs.pop("fields", None)
+        fields = kwargs.pop("fields", [])
+        self.expand = kwargs.pop("expand", []) or []
+        fields = self.expand

         # Call the initialization of the superclass.
         super().__init__(*args, **kwargs)

         # If 'fields' was provided, filter the fields of the serializer accordingly.
         if fields is not None:
             self.fields = self._filter_fields(fields)
@@ -47,12 +48,134 @@ class DynamicBaseSerializer(BaseSerializer):
             elif isinstance(item, dict):
                 allowed.append(list(item.keys())[0])

-        # Convert the current serializer's fields and the allowed fields to sets.
-        existing = set(self.fields)
-        allowed = set(allowed)
-
-        # Remove fields from the serializer that aren't in the 'allowed' list.
-        for field_name in (existing - allowed):
-            self.fields.pop(field_name)
+        for field in allowed:
+            if field not in self.fields:
+                from . import (
+                    WorkspaceLiteSerializer,
+                    ProjectLiteSerializer,
+                    UserLiteSerializer,
+                    StateLiteSerializer,
+                    IssueSerializer,
+                    LabelSerializer,
+                    CycleIssueSerializer,
+                    IssueLiteSerializer,
+                    IssueRelationSerializer,
+                    InboxIssueLiteSerializer,
+                    IssueReactionLiteSerializer,
+                    IssueAttachmentLiteSerializer,
+                    IssueLinkLiteSerializer,
+                )
+
+                # Expansion mapper
+                expansion = {
+                    "user": UserLiteSerializer,
+                    "workspace": WorkspaceLiteSerializer,
+                    "project": ProjectLiteSerializer,
+                    "default_assignee": UserLiteSerializer,
+                    "project_lead": UserLiteSerializer,
+                    "state": StateLiteSerializer,
+                    "created_by": UserLiteSerializer,
+                    "issue": IssueSerializer,
+                    "actor": UserLiteSerializer,
+                    "owned_by": UserLiteSerializer,
+                    "members": UserLiteSerializer,
+                    "assignees": UserLiteSerializer,
+                    "labels": LabelSerializer,
+                    "issue_cycle": CycleIssueSerializer,
+                    "parent": IssueLiteSerializer,
+                    "issue_relation": IssueRelationSerializer,
+                    "issue_inbox": InboxIssueLiteSerializer,
+                    "issue_reactions": IssueReactionLiteSerializer,
+                    "issue_attachment": IssueAttachmentLiteSerializer,
+                    "issue_link": IssueLinkLiteSerializer,
+                    "sub_issues": IssueLiteSerializer,
+                }
+
+                self.fields[field] = expansion[field](
+                    many=(
+                        True
+                        if field
+                        in [
+                            "members",
+                            "assignees",
+                            "labels",
+                            "issue_cycle",
+                            "issue_relation",
+                            "issue_inbox",
+                            "issue_reactions",
+                            "issue_attachment",
+                            "issue_link",
+                            "sub_issues",
+                        ]
+                        else False
+                    )
+                )

         return self.fields
+
+    def to_representation(self, instance):
+        response = super().to_representation(instance)
+
+        # Ensure 'expand' is iterable before processing
+        if self.expand:
+            for expand in self.expand:
+                if expand in self.fields:
+                    # Import all the expandable serializers
+                    from . import (
+                        WorkspaceLiteSerializer,
+                        ProjectLiteSerializer,
+                        UserLiteSerializer,
+                        StateLiteSerializer,
+                        IssueSerializer,
+                        LabelSerializer,
+                        CycleIssueSerializer,
+                        IssueRelationSerializer,
+                        InboxIssueLiteSerializer,
+                        IssueLiteSerializer,
+                        IssueReactionLiteSerializer,
+                        IssueAttachmentLiteSerializer,
+                        IssueLinkLiteSerializer,
+                    )
+
+                    # Expansion mapper
+                    expansion = {
+                        "user": UserLiteSerializer,
+                        "workspace": WorkspaceLiteSerializer,
+                        "project": ProjectLiteSerializer,
+                        "default_assignee": UserLiteSerializer,
+                        "project_lead": UserLiteSerializer,
+                        "state": StateLiteSerializer,
+                        "created_by": UserLiteSerializer,
+                        "issue": IssueSerializer,
+                        "actor": UserLiteSerializer,
+                        "owned_by": UserLiteSerializer,
+                        "members": UserLiteSerializer,
+                        "assignees": UserLiteSerializer,
+                        "labels": LabelSerializer,
+                        "issue_cycle": CycleIssueSerializer,
+                        "parent": IssueLiteSerializer,
+                        "issue_relation": IssueRelationSerializer,
+                        "issue_inbox": InboxIssueLiteSerializer,
+                        "issue_reactions": IssueReactionLiteSerializer,
+                        "issue_attachment": IssueAttachmentLiteSerializer,
+                        "issue_link": IssueLinkLiteSerializer,
+                        "sub_issues": IssueLiteSerializer,
+                    }
+                    # Check if field in expansion then expand the field
+                    if expand in expansion:
+                        if isinstance(response.get(expand), list):
+                            exp_serializer = expansion[expand](
+                                getattr(instance, expand), many=True
+                            )
+                        else:
+                            exp_serializer = expansion[expand](
+                                getattr(instance, expand)
+                            )
+                        response[expand] = exp_serializer.data
+                    else:
+                        # You might need to handle this case differently
+                        response[expand] = getattr(
+                            instance, f"{expand}_id", None
+                        )
+
+        return response
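As a usage sketch: DynamicBaseSerializer lets API callers opt into nested objects per request instead of bare ids. The names below mirror the diff (the project endpoint above calls ProjectSerializer with fields=self.fields, expand=self.expand); the concrete expand value is an assumption chosen for illustration from the expansion mapper.

# Minimal sketch, assuming ProjectSerializer builds on DynamicBaseSerializer.
serializer = ProjectSerializer(
    projects,
    many=True,
    fields=[],             # per-request field selection, parsed upstream
    expand=["workspace"],  # "workspace" maps to WorkspaceLiteSerializer above
)
data = serializer.data     # each row now embeds a nested workspace object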
@@ -3,11 +3,13 @@ from rest_framework import serializers

 # Module imports
 from .base import BaseSerializer
-from .user import UserLiteSerializer
 from .issue import IssueStateSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-from plane.db.models import Cycle, CycleIssue, CycleFavorite
+from plane.db.models import (
+    Cycle,
+    CycleIssue,
+    CycleFavorite,
+    CycleUserProperties,
+)


 class CycleWriteSerializer(BaseSerializer):
@@ -17,69 +19,67 @@ class CycleWriteSerializer(BaseSerializer):
             and data.get("end_date", None) is not None
             and data.get("start_date", None) > data.get("end_date", None)
         ):
-            raise serializers.ValidationError("Start date cannot exceed end date")
+            raise serializers.ValidationError(
+                "Start date cannot exceed end date"
+            )
         return data

     class Meta:
         model = Cycle
         fields = "__all__"


-class CycleSerializer(BaseSerializer):
-    owned_by = UserLiteSerializer(read_only=True)
-    is_favorite = serializers.BooleanField(read_only=True)
-    total_issues = serializers.IntegerField(read_only=True)
-    cancelled_issues = serializers.IntegerField(read_only=True)
-    completed_issues = serializers.IntegerField(read_only=True)
-    started_issues = serializers.IntegerField(read_only=True)
-    unstarted_issues = serializers.IntegerField(read_only=True)
-    backlog_issues = serializers.IntegerField(read_only=True)
-    assignees = serializers.SerializerMethodField(read_only=True)
-    total_estimates = serializers.IntegerField(read_only=True)
-    completed_estimates = serializers.IntegerField(read_only=True)
-    started_estimates = serializers.IntegerField(read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
-    def validate(self, data):
-        if (
-            data.get("start_date", None) is not None
-            and data.get("end_date", None) is not None
-            and data.get("start_date", None) > data.get("end_date", None)
-        ):
-            raise serializers.ValidationError("Start date cannot exceed end date")
-        return data
-
-    def get_assignees(self, obj):
-        members = [
-            {
-                "avatar": assignee.avatar,
-                "display_name": assignee.display_name,
-                "id": assignee.id,
-            }
-            for issue_cycle in obj.issue_cycle.prefetch_related(
-                "issue__assignees"
-            ).all()
-            for assignee in issue_cycle.issue.assignees.all()
-        ]
-        # Use a set comprehension to return only the unique objects
-        unique_objects = {frozenset(item.items()) for item in members}
-
-        # Convert the set back to a list of dictionaries
-        unique_list = [dict(item) for item in unique_objects]
-
-        return unique_list
-
-    class Meta:
-        model = Cycle
-        fields = "__all__"
         read_only_fields = [
             "workspace",
             "project",
             "owned_by",
+            "archived_at",
         ]


+class CycleSerializer(BaseSerializer):
+    # favorite
+    is_favorite = serializers.BooleanField(read_only=True)
+    total_issues = serializers.IntegerField(read_only=True)
+    # state group wise distribution
+    cancelled_issues = serializers.IntegerField(read_only=True)
+    completed_issues = serializers.IntegerField(read_only=True)
+    started_issues = serializers.IntegerField(read_only=True)
+    unstarted_issues = serializers.IntegerField(read_only=True)
+    backlog_issues = serializers.IntegerField(read_only=True)
+
+    # active | draft | upcoming | completed
+    status = serializers.CharField(read_only=True)
+
+    class Meta:
+        model = Cycle
+        fields = [
+            # necessary fields
+            "id",
+            "workspace_id",
+            "project_id",
+            # model fields
+            "name",
+            "description",
+            "start_date",
+            "end_date",
+            "owned_by_id",
+            "view_props",
+            "sort_order",
+            "external_source",
+            "external_id",
+            "progress_snapshot",
+            # meta fields
+            "is_favorite",
+            "total_issues",
+            "cancelled_issues",
+            "completed_issues",
+            "started_issues",
+            "unstarted_issues",
+            "backlog_issues",
+            "status",
+        ]
+        read_only_fields = fields
+
+
 class CycleIssueSerializer(BaseSerializer):
     issue_detail = IssueStateSerializer(read_only=True, source="issue")
     sub_issues_count = serializers.IntegerField(read_only=True)
@@ -105,3 +105,14 @@ class CycleFavoriteSerializer(BaseSerializer):
             "project",
             "user",
         ]
+
+
+class CycleUserPropertiesSerializer(BaseSerializer):
+    class Meta:
+        model = CycleUserProperties
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "cycle" "user",
+        ]
apiserver/plane/app/serializers/dashboard.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import Dashboard, Widget
+
+# Third party frameworks
+from rest_framework import serializers
+
+
+class DashboardSerializer(BaseSerializer):
+    class Meta:
+        model = Dashboard
+        fields = "__all__"
+
+
+class WidgetSerializer(BaseSerializer):
+    is_visible = serializers.BooleanField(read_only=True)
+    widget_filters = serializers.JSONField(read_only=True)
+
+    class Meta:
+        model = Widget
+        fields = ["id", "key", "is_visible", "widget_filters"]
@@ -2,11 +2,18 @@
 from .base import BaseSerializer

 from plane.db.models import Estimate, EstimatePoint
-from plane.app.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer
+from plane.app.serializers import (
+    WorkspaceLiteSerializer,
+    ProjectLiteSerializer,
+)
+
+from rest_framework import serializers


 class EstimateSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    workspace_detail = WorkspaceLiteSerializer(
+        read_only=True, source="workspace"
+    )
     project_detail = ProjectLiteSerializer(read_only=True, source="project")

     class Meta:
@@ -19,6 +26,16 @@ class EstimateSerializer(BaseSerializer):


 class EstimatePointSerializer(BaseSerializer):
+    def validate(self, data):
+        if not data:
+            raise serializers.ValidationError("Estimate points are required")
+        value = data.get("value")
+        if value and len(value) > 20:
+            raise serializers.ValidationError(
+                "Value can't be more than 20 characters"
+            )
+        return data
+
     class Meta:
         model = EstimatePoint
         fields = "__all__"
@@ -31,7 +48,9 @@ class EstimatePointSerializer(BaseSerializer):

 class EstimateReadSerializer(BaseSerializer):
     points = EstimatePointSerializer(read_only=True, many=True)
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    workspace_detail = WorkspaceLiteSerializer(
+        read_only=True, source="workspace"
+    )
     project_detail = ProjectLiteSerializer(read_only=True, source="project")

     class Meta:
@@ -42,3 +61,16 @@ class EstimateReadSerializer(BaseSerializer):
             "name",
             "description",
         ]
+
+
+class WorkspaceEstimateSerializer(BaseSerializer):
+    points = EstimatePointSerializer(read_only=True, many=True)
+
+    class Meta:
+        model = Estimate
+        fields = "__all__"
+        read_only_fields = [
+            "points",
+            "name",
+            "description",
+        ]
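The validate hook added to EstimatePointSerializer rejects an empty payload and any value longer than 20 characters. Restated standalone for clarity (the function name is ours):

# Standalone restatement of the rule; raises like the serializer does.
def validate_estimate_point(data):
    if not data:
        raise ValueError("Estimate points are required")
    value = data.get("value")
    if value and len(value) > 20:
        raise ValueError("Value can't be more than 20 characters")
    return data

validate_estimate_point({"value": "XL"})          # fine
# validate_estimate_point({"value": "x" * 21})    # would raise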
@@ -5,7 +5,9 @@ from .user import UserLiteSerializer


 class ExporterHistorySerializer(BaseSerializer):
-    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
+    initiated_by_detail = UserLiteSerializer(
+        source="initiated_by", read_only=True
+    )

     class Meta:
         model = ExporterHistory
@@ -7,9 +7,13 @@ from plane.db.models import Importer


 class ImporterSerializer(BaseSerializer):
-    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
+    initiated_by_detail = UserLiteSerializer(
+        source="initiated_by", read_only=True
+    )
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(
+        source="workspace", read_only=True
+    )

     class Meta:
         model = Importer
@@ -3,7 +3,11 @@ from rest_framework import serializers

 # Module imports
 from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .issue import (
+    IssueInboxSerializer,
+    LabelLiteSerializer,
+    IssueDetailSerializer,
+)
 from .project import ProjectLiteSerializer
 from .state import StateLiteSerializer
 from .user import UserLiteSerializer
@@ -24,17 +28,62 @@ class InboxSerializer(BaseSerializer):


 class InboxIssueSerializer(BaseSerializer):
-    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
+    issue = IssueInboxSerializer(read_only=True)

     class Meta:
         model = InboxIssue
-        fields = "__all__"
+        fields = [
+            "id",
+            "status",
+            "duplicate_to",
+            "snoozed_till",
+            "source",
+            "issue",
+            "created_by",
+        ]
         read_only_fields = [
             "project",
             "workspace",
         ]

+    def to_representation(self, instance):
+        # Pass the annotated fields to the Issue instance if they exist
+        if hasattr(instance, "label_ids"):
+            instance.issue.label_ids = instance.label_ids
+        return super().to_representation(instance)
+
+
+class InboxIssueDetailSerializer(BaseSerializer):
+    issue = IssueDetailSerializer(read_only=True)
+    duplicate_issue_detail = IssueInboxSerializer(
+        read_only=True, source="duplicate_to"
+    )
+
+    class Meta:
+        model = InboxIssue
+        fields = [
+            "id",
+            "status",
+            "duplicate_to",
+            "snoozed_till",
+            "duplicate_issue_detail",
+            "source",
+            "issue",
+        ]
+        read_only_fields = [
+            "project",
+            "workspace",
+        ]
+
+    def to_representation(self, instance):
+        # Pass the annotated fields to the Issue instance if they exist
+        if hasattr(instance, "assignee_ids"):
+            instance.issue.assignee_ids = instance.assignee_ids
+        if hasattr(instance, "label_ids"):
+            instance.issue.label_ids = instance.label_ids
+
+        return super().to_representation(instance)
+
+
 class InboxIssueLiteSerializer(BaseSerializer):
     class Meta:
@@ -46,10 +95,13 @@ class InboxIssueLiteSerializer(BaseSerializer):
 class IssueStateInboxSerializer(BaseSerializer):
     state_detail = StateLiteSerializer(read_only=True, source="state")
     project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+    label_details = LabelLiteSerializer(
+        read_only=True, source="labels", many=True
+    )
+    assignee_details = UserLiteSerializer(
+        read_only=True, source="assignees", many=True
+    )
     sub_issues_count = serializers.IntegerField(read_only=True)
-    bridge_id = serializers.UUIDField(read_only=True)
     issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)

     class Meta:
@@ -1,8 +0,0 @@
-from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
-from .github import (
-    GithubRepositorySerializer,
-    GithubRepositorySyncSerializer,
-    GithubIssueSyncSerializer,
-    GithubCommentSyncSerializer,
-)
-from .slack import SlackProjectSyncSerializer

@@ -1,20 +0,0 @@
-# Module imports
-from plane.app.serializers import BaseSerializer
-from plane.db.models import Integration, WorkspaceIntegration
-
-
-class IntegrationSerializer(BaseSerializer):
-    class Meta:
-        model = Integration
-        fields = "__all__"
-        read_only_fields = [
-            "verified",
-        ]
-
-
-class WorkspaceIntegrationSerializer(BaseSerializer):
-    integration_detail = IntegrationSerializer(read_only=True, source="integration")
-
-    class Meta:
-        model = WorkspaceIntegration
-        fields = "__all__"

@@ -1,45 +0,0 @@
-# Module imports
-from plane.app.serializers import BaseSerializer
-from plane.db.models import (
-    GithubIssueSync,
-    GithubRepository,
-    GithubRepositorySync,
-    GithubCommentSync,
-)
-
-
-class GithubRepositorySerializer(BaseSerializer):
-    class Meta:
-        model = GithubRepository
-        fields = "__all__"
-
-
-class GithubRepositorySyncSerializer(BaseSerializer):
-    repo_detail = GithubRepositorySerializer(source="repository")
-
-    class Meta:
-        model = GithubRepositorySync
-        fields = "__all__"
-
-
-class GithubIssueSyncSerializer(BaseSerializer):
-    class Meta:
-        model = GithubIssueSync
-        fields = "__all__"
-        read_only_fields = [
-            "project",
-            "workspace",
-            "repository_sync",
-        ]
-
-
-class GithubCommentSyncSerializer(BaseSerializer):
-    class Meta:
-        model = GithubCommentSync
-        fields = "__all__"
-        read_only_fields = [
-            "project",
-            "workspace",
-            "repository_sync",
-            "issue_sync",
-        ]

@@ -1,14 +0,0 @@
-# Module imports
-from plane.app.serializers import BaseSerializer
-from plane.db.models import SlackProjectSync
-
-
-class SlackProjectSyncSerializer(BaseSerializer):
-    class Meta:
-        model = SlackProjectSync
-        fields = "__all__"
-        read_only_fields = [
-            "project",
-            "workspace",
-            "workspace_integration",
-        ]
@@ -1,5 +1,7 @@
 # Django imports
 from django.utils import timezone
+from django.core.validators import URLValidator
+from django.core.exceptions import ValidationError

 # Third Party imports
 from rest_framework import serializers
@@ -7,7 +9,7 @@ from rest_framework import serializers
 # Module imports
 from .base import BaseSerializer, DynamicBaseSerializer
 from .user import UserLiteSerializer
-from .state import StateSerializer, StateLiteSerializer
+from .state import StateLiteSerializer
 from .project import ProjectLiteSerializer
 from .workspace import WorkspaceLiteSerializer
 from plane.db.models import (
@@ -30,6 +32,7 @@ from plane.db.models import (
     CommentReaction,
     IssueVote,
     IssueRelation,
+    State,
 )


@@ -69,19 +72,26 @@ class IssueProjectLiteSerializer(BaseSerializer):
 ##TODO: Find a better way to write this serializer
 ## Find a better approach to save manytomany?
 class IssueCreateSerializer(BaseSerializer):
-    state_detail = StateSerializer(read_only=True, source="state")
-    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
-    assignees = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+    # ids
+    state_id = serializers.PrimaryKeyRelatedField(
+        source="state",
+        queryset=State.objects.all(),
+        required=False,
+        allow_null=True,
+    )
+    parent_id = serializers.PrimaryKeyRelatedField(
+        source="parent",
+        queryset=Issue.objects.all(),
+        required=False,
+        allow_null=True,
+    )
+    label_ids = serializers.ListField(
+        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
         write_only=True,
         required=False,
     )
-
-    labels = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+    assignee_ids = serializers.ListField(
+        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
         write_only=True,
         required=False,
     )
@@ -100,8 +110,10 @@ class IssueCreateSerializer(BaseSerializer):

     def to_representation(self, instance):
         data = super().to_representation(instance)
-        data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
-        data['labels'] = [str(label.id) for label in instance.labels.all()]
+        assignee_ids = self.initial_data.get("assignee_ids")
+        data["assignee_ids"] = assignee_ids if assignee_ids else []
+        label_ids = self.initial_data.get("label_ids")
+        data["label_ids"] = label_ids if label_ids else []
         return data

     def validate(self, data):
@@ -110,12 +122,14 @@ class IssueCreateSerializer(BaseSerializer):
             and data.get("target_date", None) is not None
             and data.get("start_date", None) > data.get("target_date", None)
         ):
-            raise serializers.ValidationError("Start date cannot exceed target date")
+            raise serializers.ValidationError(
+                "Start date cannot exceed target date"
+            )
         return data

     def create(self, validated_data):
-        assignees = validated_data.pop("assignees", None)
-        labels = validated_data.pop("labels", None)
+        assignees = validated_data.pop("assignee_ids", None)
+        labels = validated_data.pop("label_ids", None)

         project_id = self.context["project_id"]
         workspace_id = self.context["workspace_id"]
@@ -173,8 +187,8 @@ class IssueCreateSerializer(BaseSerializer):
         return issue

     def update(self, instance, validated_data):
-        assignees = validated_data.pop("assignees", None)
-        labels = validated_data.pop("labels", None)
+        assignees = validated_data.pop("assignee_ids", None)
+        labels = validated_data.pop("label_ids", None)

         # Related models
         project_id = instance.project_id
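After this change the issue create/update API speaks in ids: assignee_ids and label_ids are write-only lists, and to_representation simply echoes back what the caller sent. A request-side sketch, with placeholder ids:

# Sketch of the new payload shape; all ids below are placeholders.
payload = {
    "name": "Fix login redirect",
    "state_id": "<state-uuid>",
    "assignee_ids": ["<user-uuid-1>", "<user-uuid-2>"],
    "label_ids": ["<label-uuid>"],
}
serializer = IssueCreateSerializer(
    data=payload,
    context={"project_id": project_id, "workspace_id": workspace_id},
)
if serializer.is_valid():
    issue = serializer.save()   # create() pops assignee_ids / label_ids
    body = serializer.data      # ...and they are echoed back verbatim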
@ -225,14 +239,15 @@ class IssueActivitySerializer(BaseSerializer):
|
|||||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||||
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
workspace_detail = WorkspaceLiteSerializer(
|
||||||
|
read_only=True, source="workspace"
|
||||||
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = IssueActivity
|
model = IssueActivity
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class IssuePropertySerializer(BaseSerializer):
|
class IssuePropertySerializer(BaseSerializer):
|
||||||
class Meta:
|
class Meta:
|
||||||
model = IssueProperty
|
model = IssueProperty
|
||||||
@ -245,12 +260,17 @@ class IssuePropertySerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class LabelSerializer(BaseSerializer):
|
class LabelSerializer(BaseSerializer):
|
||||||
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Label
|
model = Label
|
||||||
fields = "__all__"
|
fields = [
|
||||||
|
"parent",
|
||||||
|
"name",
|
||||||
|
"color",
|
||||||
|
"id",
|
||||||
|
"project_id",
|
||||||
|
"workspace_id",
|
||||||
|
"sort_order",
|
||||||
|
]
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
@ -268,7 +288,6 @@ class LabelLiteSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class IssueLabelSerializer(BaseSerializer):
|
class IssueLabelSerializer(BaseSerializer):
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = IssueLabel
|
model = IssueLabel
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
@ -279,33 +298,50 @@ class IssueLabelSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class IssueRelationSerializer(BaseSerializer):
|
class IssueRelationSerializer(BaseSerializer):
|
||||||
issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
|
id = serializers.UUIDField(source="related_issue.id", read_only=True)
|
||||||
|
project_id = serializers.PrimaryKeyRelatedField(
|
||||||
|
source="related_issue.project_id", read_only=True
|
||||||
|
)
|
||||||
|
sequence_id = serializers.IntegerField(
|
||||||
|
source="related_issue.sequence_id", read_only=True
|
||||||
|
)
|
||||||
|
name = serializers.CharField(source="related_issue.name", read_only=True)
|
||||||
|
relation_type = serializers.CharField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
     class Meta:
         model = IssueRelation
         fields = [
-            "issue_detail",
+            "id",
+            "project_id",
+            "sequence_id",
             "relation_type",
-            "related_issue",
-            "issue",
-            "id"
+            "name",
         ]
         read_only_fields = [
             "workspace",
             "project",
         ]


 class RelatedIssueSerializer(BaseSerializer):
-    issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
+    id = serializers.UUIDField(source="issue.id", read_only=True)
+    project_id = serializers.PrimaryKeyRelatedField(
+        source="issue.project_id", read_only=True
+    )
+    sequence_id = serializers.IntegerField(
+        source="issue.sequence_id", read_only=True
+    )
+    name = serializers.CharField(source="issue.name", read_only=True)
+    relation_type = serializers.CharField(read_only=True)

     class Meta:
         model = IssueRelation
         fields = [
-            "issue_detail",
+            "id",
+            "project_id",
+            "sequence_id",
             "relation_type",
-            "related_issue",
-            "issue",
-            "id"
+            "name",
         ]
         read_only_fields = [
             "workspace",

@@ -397,16 +433,57 @@ class IssueLinkSerializer(BaseSerializer):
             "issue",
         ]

+    def validate_url(self, value):
+        # Check URL format
+        validate_url = URLValidator()
+        try:
+            validate_url(value)
+        except ValidationError:
+            raise serializers.ValidationError("Invalid URL format.")
+
+        # Check URL scheme
+        if not value.startswith(('http://', 'https://')):
+            raise serializers.ValidationError("Invalid URL scheme.")
+
+        return value
+
     # Validation if url already exists
     def create(self, validated_data):
         if IssueLink.objects.filter(
-            url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
+            url=validated_data.get("url"),
+            issue_id=validated_data.get("issue_id"),
         ).exists():
             raise serializers.ValidationError(
                 {"error": "URL already exists for this Issue"}
             )
         return IssueLink.objects.create(**validated_data)

+    def update(self, instance, validated_data):
+        if IssueLink.objects.filter(
+            url=validated_data.get("url"),
+            issue_id=instance.issue_id,
+        ).exists():
+            raise serializers.ValidationError(
+                {"error": "URL already exists for this Issue"}
+            )
+
+        return super().update(instance, validated_data)
+
+
+class IssueLinkLiteSerializer(BaseSerializer):
+    class Meta:
+        model = IssueLink
+        fields = [
+            "id",
+            "issue_id",
+            "title",
+            "url",
+            "metadata",
+            "created_by_id",
+            "created_at",
+        ]
+        read_only_fields = fields
+
+
 class IssueAttachmentSerializer(BaseSerializer):
     class Meta:
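The new validate_url hook rejects malformed links before the duplicate-URL check in create()/update() runs. A framework-free sketch of the same two-step check, using urllib.parse in place of Django's URLValidator (the function name and plain ValueError are illustrative, not part of the diff):

    from urllib.parse import urlparse

    def check_link(value: str) -> str:
        # Step 1: format - a URL without a hostname is malformed
        if not urlparse(value).netloc:
            raise ValueError("Invalid URL format.")
        # Step 2: scheme - only http(s) links are accepted
        if not value.startswith(("http://", "https://")):
            raise ValueError("Invalid URL scheme.")
        return value

    check_link("https://example.com/docs")   # passes
    # check_link("ftp://example.com")        # raises ValueError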
@@ -423,8 +500,21 @@ class IssueAttachmentSerializer(BaseSerializer):
         ]


-class IssueReactionSerializer(BaseSerializer):
+class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
+    class Meta:
+        model = IssueAttachment
+        fields = [
+            "id",
+            "asset",
+            "attributes",
+            "issue_id",
+            "updated_at",
+            "updated_by_id",
+        ]
+        read_only_fields = fields
+
+
+class IssueReactionSerializer(BaseSerializer):
     actor_detail = UserLiteSerializer(read_only=True, source="actor")

     class Meta:

@@ -438,16 +528,14 @@ class IssueReactionSerializer(BaseSerializer):
         ]


-class CommentReactionLiteSerializer(BaseSerializer):
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
+class IssueReactionLiteSerializer(DynamicBaseSerializer):
     class Meta:
-        model = CommentReaction
+        model = IssueReaction
         fields = [
             "id",
+            "actor",
+            "issue",
             "reaction",
-            "comment",
-            "actor_detail",
         ]


@@ -459,12 +547,18 @@ class CommentReactionSerializer(BaseSerializer):


 class IssueVoteSerializer(BaseSerializer):

     actor_detail = UserLiteSerializer(read_only=True, source="actor")

     class Meta:
         model = IssueVote
-        fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
+        fields = [
+            "issue",
+            "vote",
+            "workspace",
+            "project",
+            "actor",
+            "actor_detail",
+        ]
         read_only_fields = fields


@@ -472,8 +566,10 @@ class IssueCommentSerializer(BaseSerializer):
     actor_detail = UserLiteSerializer(read_only=True, source="actor")
     issue_detail = IssueFlatSerializer(read_only=True, source="issue")
     project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
+    workspace_detail = WorkspaceLiteSerializer(
+        read_only=True, source="workspace"
+    )
+    comment_reactions = CommentReactionSerializer(read_only=True, many=True)
     is_member = serializers.BooleanField(read_only=True)

     class Meta:

@@ -507,12 +603,15 @@ class IssueStateFlatSerializer(BaseSerializer):


 # Issue Serializer with state details
 class IssueStateSerializer(DynamicBaseSerializer):
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+    label_details = LabelLiteSerializer(
+        read_only=True, source="labels", many=True
+    )
     state_detail = StateLiteSerializer(read_only=True, source="state")
     project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+    assignee_details = UserLiteSerializer(
+        read_only=True, source="assignees", many=True
+    )
     sub_issues_count = serializers.IntegerField(read_only=True)
-    bridge_id = serializers.UUIDField(read_only=True)
     attachment_count = serializers.IntegerField(read_only=True)
     link_count = serializers.IntegerField(read_only=True)

@@ -521,67 +620,110 @@ class IssueStateSerializer(DynamicBaseSerializer):
         fields = "__all__"


-class IssueSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    state_detail = StateSerializer(read_only=True, source="state")
-    parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
-    label_details = LabelSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
-    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
-    issue_cycle = IssueCycleDetailSerializer(read_only=True)
-    issue_module = IssueModuleDetailSerializer(read_only=True)
-    issue_link = IssueLinkSerializer(read_only=True, many=True)
-    issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+class IssueInboxSerializer(DynamicBaseSerializer):
+    label_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )

     class Meta:
         model = Issue
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
+        fields = [
+            "id",
+            "name",
+            "priority",
+            "sequence_id",
+            "project_id",
+            "created_at",
+            "label_ids",
+        ]
+        read_only_fields = fields
+
+
+class IssueSerializer(DynamicBaseSerializer):
+    # ids
+    cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
+    module_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )
+
+    # Many to many
+    label_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )
+    assignee_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )
+
+    # Count items
+    sub_issues_count = serializers.IntegerField(read_only=True)
+    attachment_count = serializers.IntegerField(read_only=True)
+    link_count = serializers.IntegerField(read_only=True)
+
+    class Meta:
+        model = Issue
+        fields = [
+            "id",
+            "name",
+            "state_id",
+            "sort_order",
+            "completed_at",
+            "estimate_point",
+            "priority",
+            "start_date",
+            "target_date",
+            "sequence_id",
+            "project_id",
+            "parent_id",
+            "cycle_id",
+            "module_ids",
+            "label_ids",
+            "assignee_ids",
+            "sub_issues_count",
             "created_at",
             "updated_at",
+            "created_by",
+            "updated_by",
+            "attachment_count",
+            "link_count",
+            "is_draft",
+            "archived_at",
         ]
+        read_only_fields = fields


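Several serializers in this hunk move from BaseSerializer to DynamicBaseSerializer with an explicit fields allow-list, and the nested *_detail sub-serializers give way to flat *_id values. DynamicBaseSerializer itself is not shown in this diff; a minimal sketch of the standard DRF dynamic-fields pattern it presumably follows (the class name and kwarg here are illustrative assumptions):

    from rest_framework import serializers

    class DynamicFieldsModelSerializer(serializers.ModelSerializer):
        """Accept an optional `fields=(...)` kwarg and drop every
        declared field that was not requested."""

        def __init__(self, *args, **kwargs):
            requested = kwargs.pop("fields", None)
            super().__init__(*args, **kwargs)
            if requested is not None:
                for name in set(self.fields) - set(requested):
                    self.fields.pop(name)

    # e.g. DynamicFieldsModelSerializer(instance, fields=("id", "name"))
    # would serialize only `id` and `name`.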
 class IssueLiteSerializer(DynamicBaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    cycle_id = serializers.UUIDField(read_only=True)
-    module_id = serializers.UUIDField(read_only=True)
-    attachment_count = serializers.IntegerField(read_only=True)
-    link_count = serializers.IntegerField(read_only=True)
-    issue_reactions = IssueReactionSerializer(read_only=True, many=True)
-
     class Meta:
         model = Issue
-        fields = "__all__"
-        read_only_fields = [
-            "start_date",
-            "target_date",
-            "completed_at",
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
-            "created_at",
-            "updated_at",
+        fields = [
+            "id",
+            "sequence_id",
+            "project_id",
         ]
+        read_only_fields = fields
+
+
+class IssueDetailSerializer(IssueSerializer):
+    description_html = serializers.CharField()
+    is_subscribed = serializers.BooleanField(read_only=True)
+
+    class Meta(IssueSerializer.Meta):
+        fields = IssueSerializer.Meta.fields + [
+            "description_html",
+            "is_subscribed",
+        ]
+        read_only_fields = fields


 class IssuePublicSerializer(BaseSerializer):
     project_detail = ProjectLiteSerializer(read_only=True, source="project")
     state_detail = StateLiteSerializer(read_only=True, source="state")
-    reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
+    reactions = IssueReactionSerializer(
+        read_only=True, many=True, source="issue_reactions"
+    )
     votes = IssueVoteSerializer(read_only=True, many=True)

     class Meta:

@@ -604,7 +746,6 @@ class IssuePublicSerializer(BaseSerializer):
         read_only_fields = fields


-
 class IssueSubscriberSerializer(BaseSerializer):
     class Meta:
         model = IssueSubscriber

@@ -2,10 +2,8 @@
 from rest_framework import serializers

 # Module imports
-from .base import BaseSerializer
-from .user import UserLiteSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
 from .project import ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer

 from plane.db.models import (
     User,

@@ -14,19 +12,23 @@ from plane.db.models import (
     ModuleIssue,
     ModuleLink,
     ModuleFavorite,
+    ModuleUserProperties,
 )


 class ModuleWriteSerializer(BaseSerializer):
-    members = serializers.ListField(
+    lead_id = serializers.PrimaryKeyRelatedField(
+        source="lead",
+        queryset=User.objects.all(),
+        required=False,
+        allow_null=True,
+    )
+    member_ids = serializers.ListField(
         child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
         write_only=True,
         required=False,
     )

-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-
     class Meta:
         model = Module
         fields = "__all__"

@@ -37,25 +39,32 @@ class ModuleWriteSerializer(BaseSerializer):
             "updated_by",
             "created_at",
             "updated_at",
+            "archived_at",
         ]

     def to_representation(self, instance):
         data = super().to_representation(instance)
-        data['members'] = [str(member.id) for member in instance.members.all()]
+        data["member_ids"] = [
+            str(member.id) for member in instance.members.all()
+        ]
         return data

     def validate(self, data):
-        if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
-            raise serializers.ValidationError("Start date cannot exceed target date")
+        if (
+            data.get("start_date", None) is not None
+            and data.get("target_date", None) is not None
+            and data.get("start_date", None) > data.get("target_date", None)
+        ):
+            raise serializers.ValidationError(
+                "Start date cannot exceed target date"
+            )
         return data

     def create(self, validated_data):
-        members = validated_data.pop("members", None)
+        members = validated_data.pop("member_ids", None)

         project = self.context["project"]

         module = Module.objects.create(**validated_data, project=project)

         if members is not None:
             ModuleMember.objects.bulk_create(
                 [
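The reflowed validate() keeps the original rule: both dates are optional, but when both are present the start may not come after the target. The rule in isolation, runnable without Django (the function name and ValueError are illustrative):

    from datetime import date

    def validate_dates(start_date, target_date):
        if (
            start_date is not None
            and target_date is not None
            and start_date > target_date
        ):
            raise ValueError("Start date cannot exceed target date")

    validate_dates(date(2024, 1, 1), date(2024, 2, 1))    # ok
    validate_dates(None, date(2024, 2, 1))                # ok: start is optional
    # validate_dates(date(2024, 3, 1), date(2024, 2, 1))  # raises ValueError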
@@ -76,7 +85,7 @@ class ModuleWriteSerializer(BaseSerializer):
         return module

     def update(self, instance, validated_data):
-        members = validated_data.pop("members", None)
+        members = validated_data.pop("member_ids", None)

         if members is not None:
             ModuleMember.objects.filter(module=instance).delete()

@@ -133,8 +142,6 @@ class ModuleIssueSerializer(BaseSerializer):


 class ModuleLinkSerializer(BaseSerializer):
-    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
     class Meta:
         model = ModuleLink
         fields = "__all__"

@@ -151,7 +158,8 @@ class ModuleLinkSerializer(BaseSerializer):
     # Validation if url already exists
     def create(self, validated_data):
         if ModuleLink.objects.filter(
-            url=validated_data.get("url"), module_id=validated_data.get("module_id")
+            url=validated_data.get("url"),
+            module_id=validated_data.get("module_id"),
         ).exists():
             raise serializers.ValidationError(
                 {"error": "URL already exists for this Issue"}

@@ -159,11 +167,10 @@ class ModuleLinkSerializer(BaseSerializer):
         return ModuleLink.objects.create(**validated_data)


-class ModuleSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    lead_detail = UserLiteSerializer(read_only=True, source="lead")
-    members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
-    link_module = ModuleLinkSerializer(read_only=True, many=True)
+class ModuleSerializer(DynamicBaseSerializer):
+    member_ids = serializers.ListField(
+        child=serializers.UUIDField(), required=False, allow_null=True
+    )
     is_favorite = serializers.BooleanField(read_only=True)
     total_issues = serializers.IntegerField(read_only=True)
     cancelled_issues = serializers.IntegerField(read_only=True)

@@ -174,15 +181,45 @@ class ModuleSerializer(BaseSerializer):

     class Meta:
         model = Module
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "created_by",
-            "updated_by",
+        fields = [
+            # Required fields
+            "id",
+            "workspace_id",
+            "project_id",
+            # Model fields
+            "name",
+            "description",
+            "description_text",
+            "description_html",
+            "start_date",
+            "target_date",
+            "status",
+            "lead_id",
+            "member_ids",
+            "view_props",
+            "sort_order",
+            "external_source",
+            "external_id",
+            # computed fields
+            "is_favorite",
+            "total_issues",
+            "cancelled_issues",
+            "completed_issues",
+            "started_issues",
+            "unstarted_issues",
+            "backlog_issues",
             "created_at",
             "updated_at",
         ]
+        read_only_fields = fields
+
+
+class ModuleDetailSerializer(ModuleSerializer):
+    link_module = ModuleLinkSerializer(read_only=True, many=True)
+    sub_issues = serializers.IntegerField(read_only=True)
+
+    class Meta(ModuleSerializer.Meta):
+        fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues"]


 class ModuleFavoriteSerializer(BaseSerializer):
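ModuleDetailSerializer extends the list-based Meta by concatenation rather than redefinition, so the detail payload stays a strict superset of the list payload. The pattern in isolation, runnable as plain Python (class names here are illustrative stand-ins for the Meta classes):

    class ModuleMeta:
        fields = ["id", "name", "status", "member_ids"]

    class ModuleDetailMeta(ModuleMeta):
        # extend, never redefine, the parent's allow-list
        fields = ModuleMeta.fields + ["link_module", "sub_issues"]

    assert ModuleDetailMeta.fields[:4] == ModuleMeta.fields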
@@ -196,3 +233,10 @@ class ModuleFavoriteSerializer(BaseSerializer):
             "project",
             "user",
         ]
+
+
+class ModuleUserPropertiesSerializer(BaseSerializer):
+    class Meta:
+        model = ModuleUserProperties
+        fields = "__all__"
+        read_only_fields = ["workspace", "project", "module", "user"]

@@ -1,12 +1,20 @@
 # Module imports
 from .base import BaseSerializer
 from .user import UserLiteSerializer
-from plane.db.models import Notification
+from plane.db.models import Notification, UserNotificationPreference


 class NotificationSerializer(BaseSerializer):
-    triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")
+    triggered_by_details = UserLiteSerializer(
+        read_only=True, source="triggered_by"
+    )

     class Meta:
         model = Notification
         fields = "__all__"
+
+
+class UserNotificationPreferenceSerializer(BaseSerializer):
+    class Meta:
+        model = UserNotificationPreference
+        fields = "__all__"
@@ -3,22 +3,32 @@ from rest_framework import serializers

 # Module imports
 from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .issue import LabelLiteSerializer
 from .workspace import WorkspaceLiteSerializer
 from .project import ProjectLiteSerializer
-from plane.db.models import Page, PageLog, PageFavorite, PageLabel, Label, Issue, Module
+from plane.db.models import (
+    Page,
+    PageLog,
+    PageFavorite,
+    PageLabel,
+    Label,
+)


 class PageSerializer(BaseSerializer):
     is_favorite = serializers.BooleanField(read_only=True)
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+    label_details = LabelLiteSerializer(
+        read_only=True, source="labels", many=True
+    )
     labels = serializers.ListField(
         child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
         write_only=True,
         required=False,
     )
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(
+        source="workspace", read_only=True
+    )

     class Meta:
         model = Page

@@ -28,9 +38,10 @@ class PageSerializer(BaseSerializer):
             "project",
             "owned_by",
         ]
+
     def to_representation(self, instance):
         data = super().to_representation(instance)
-        data['labels'] = [str(label.id) for label in instance.labels.all()]
+        data["labels"] = [str(label.id) for label in instance.labels.all()]
         return data

     def create(self, validated_data):

@@ -94,7 +105,7 @@ class SubPageSerializer(BaseSerializer):

     def get_entity_details(self, obj):
         entity_name = obj.entity_name
-        if entity_name == 'forward_link' or entity_name == 'back_link':
+        if entity_name == "forward_link" or entity_name == "back_link":
             try:
                 page = Page.objects.get(pk=obj.entity_identifier)
                 return PageSerializer(page).data

@@ -104,7 +115,6 @@ class SubPageSerializer(BaseSerializer):


 class PageLogSerializer(BaseSerializer):
-
     class Meta:
         model = PageLog
         fields = "__all__"
@@ -4,7 +4,10 @@ from rest_framework import serializers
 # Module imports
 from .base import BaseSerializer, DynamicBaseSerializer
 from plane.app.serializers.workspace import WorkspaceLiteSerializer
-from plane.app.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
+from plane.app.serializers.user import (
+    UserLiteSerializer,
+    UserAdminLiteSerializer,
+)
 from plane.db.models import (
     Project,
     ProjectMember,

@@ -17,7 +20,9 @@ from plane.db.models import (


 class ProjectSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(
+        source="workspace", read_only=True
+    )

     class Meta:
         model = Project

@@ -29,12 +34,16 @@ class ProjectSerializer(BaseSerializer):
     def create(self, validated_data):
         identifier = validated_data.get("identifier", "").strip().upper()
         if identifier == "":
-            raise serializers.ValidationError(detail="Project Identifier is required")
+            raise serializers.ValidationError(
+                detail="Project Identifier is required"
+            )

         if ProjectIdentifier.objects.filter(
             name=identifier, workspace_id=self.context["workspace_id"]
         ).exists():
-            raise serializers.ValidationError(detail="Project Identifier is taken")
+            raise serializers.ValidationError(
+                detail="Project Identifier is taken"
+            )
         project = Project.objects.create(
             **validated_data, workspace_id=self.context["workspace_id"]
         )

@@ -73,7 +82,9 @@ class ProjectSerializer(BaseSerializer):
             return project

         # If not same fail update
-        raise serializers.ValidationError(detail="Project Identifier is already taken")
+        raise serializers.ValidationError(
+            detail="Project Identifier is already taken"
+        )


 class ProjectLiteSerializer(BaseSerializer):

@@ -84,14 +95,19 @@ class ProjectLiteSerializer(BaseSerializer):
             "identifier",
             "name",
             "cover_image",
-            "icon_prop",
-            "emoji",
+            "logo_props",
             "description",
         ]
         read_only_fields = fields


 class ProjectListSerializer(DynamicBaseSerializer):
+    total_issues = serializers.IntegerField(read_only=True)
+    archived_issues = serializers.IntegerField(read_only=True)
+    archived_sub_issues = serializers.IntegerField(read_only=True)
+    draft_issues = serializers.IntegerField(read_only=True)
+    draft_sub_issues = serializers.IntegerField(read_only=True)
+    sub_issues = serializers.IntegerField(read_only=True)
     is_favorite = serializers.BooleanField(read_only=True)
     total_members = serializers.IntegerField(read_only=True)
     total_cycles = serializers.IntegerField(read_only=True)

@@ -160,6 +176,12 @@ class ProjectMemberAdminSerializer(BaseSerializer):
         fields = "__all__"


+class ProjectMemberRoleSerializer(DynamicBaseSerializer):
+    class Meta:
+        model = ProjectMember
+        fields = ("id", "role", "member", "project")
+
+
 class ProjectMemberInviteSerializer(BaseSerializer):
     project = ProjectLiteSerializer(read_only=True)
     workspace = WorkspaceLiteSerializer(read_only=True)

@@ -197,7 +219,9 @@ class ProjectMemberLiteSerializer(BaseSerializer):

 class ProjectDeployBoardSerializer(BaseSerializer):
     project_details = ProjectLiteSerializer(read_only=True, source="project")
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    workspace_detail = WorkspaceLiteSerializer(
+        read_only=True, source="workspace"
+    )

     class Meta:
         model = ProjectDeployBoard
@@ -6,10 +6,19 @@ from plane.db.models import State


 class StateSerializer(BaseSerializer):
-
     class Meta:
         model = State
-        fields = "__all__"
+        fields = [
+            "id",
+            "project_id",
+            "workspace_id",
+            "name",
+            "color",
+            "group",
+            "default",
+            "description",
+            "sequence",
+        ]
         read_only_fields = [
             "workspace",
             "project",
@@ -4,7 +4,6 @@ from rest_framework import serializers
 # Module import
 from .base import BaseSerializer
 from plane.db.models import User, Workspace, WorkspaceMemberInvite
-from plane.license.models import InstanceAdmin, Instance


 class UserSerializer(BaseSerializer):

@@ -99,17 +98,20 @@ class UserMeSettingsSerializer(BaseSerializer):
             ).first()
             return {
                 "last_workspace_id": obj.last_workspace_id,
-                "last_workspace_slug": workspace.slug if workspace is not None else "",
+                "last_workspace_slug": (
+                    workspace.slug if workspace is not None else ""
+                ),
                 "fallback_workspace_id": obj.last_workspace_id,
-                "fallback_workspace_slug": workspace.slug
-                if workspace is not None
-                else "",
+                "fallback_workspace_slug": (
+                    workspace.slug if workspace is not None else ""
+                ),
                 "invites": workspace_invites,
             }
         else:
             fallback_workspace = (
                 Workspace.objects.filter(
-                    workspace_member__member_id=obj.id, workspace_member__is_active=True
+                    workspace_member__member_id=obj.id,
+                    workspace_member__is_active=True,
                 )
                 .order_by("created_at")
                 .first()

@@ -117,12 +119,16 @@ class UserMeSettingsSerializer(BaseSerializer):
             return {
                 "last_workspace_id": None,
                 "last_workspace_slug": None,
-                "fallback_workspace_id": fallback_workspace.id
-                if fallback_workspace is not None
-                else None,
-                "fallback_workspace_slug": fallback_workspace.slug
-                if fallback_workspace is not None
-                else None,
+                "fallback_workspace_id": (
+                    fallback_workspace.id
+                    if fallback_workspace is not None
+                    else None
+                ),
+                "fallback_workspace_slug": (
+                    fallback_workspace.slug
+                    if fallback_workspace is not None
+                    else None
+                ),
                 "invites": workspace_invites,
             }

@@ -180,7 +186,9 @@ class ChangePasswordSerializer(serializers.Serializer):

         if data.get("new_password") != data.get("confirm_password"):
             raise serializers.ValidationError(
-                {"error": "Confirm password should be same as the new password."}
+                {
+                    "error": "Confirm password should be same as the new password."
+                }
             )

         return data

@@ -190,4 +198,5 @@ class ResetPasswordSerializer(serializers.Serializer):
     """
     Serializer for password change endpoint.
     """
+
     new_password = serializers.CharField(required=True, min_length=8)
@@ -2,7 +2,7 @@
 from rest_framework import serializers

 # Module imports
-from .base import BaseSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
 from .workspace import WorkspaceLiteSerializer
 from .project import ProjectLiteSerializer
 from plane.db.models import GlobalView, IssueView, IssueViewFavorite

@@ -10,7 +10,9 @@ from plane.utils.issue_filters import issue_filters


 class GlobalViewSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(
+        source="workspace", read_only=True
+    )

     class Meta:
         model = GlobalView

@@ -38,10 +40,12 @@ class GlobalViewSerializer(BaseSerializer):
         return super().update(instance, validated_data)


-class IssueViewSerializer(BaseSerializer):
+class IssueViewSerializer(DynamicBaseSerializer):
     is_favorite = serializers.BooleanField(read_only=True)
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(
+        source="workspace", read_only=True
+    )

     class Meta:
         model = IssueView
@@ -1,5 +1,4 @@
 # Python imports
-import urllib
 import socket
 import ipaddress
 from urllib.parse import urlparse

@@ -12,6 +11,7 @@ from .base import DynamicBaseSerializer
 from plane.db.models import Webhook, WebhookLog
 from plane.db.models.webhook import validate_domain, validate_schema

+
 class WebhookSerializer(DynamicBaseSerializer):
     url = serializers.URLField(validators=[validate_schema, validate_domain])

@@ -21,32 +21,49 @@ class WebhookSerializer(DynamicBaseSerializer):
         # Extract the hostname from the URL
         hostname = urlparse(url).hostname
         if not hostname:
-            raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
+            raise serializers.ValidationError(
+                {"url": "Invalid URL: No hostname found."}
+            )

         # Resolve the hostname to IP addresses
         try:
             ip_addresses = socket.getaddrinfo(hostname, None)
         except socket.gaierror:
-            raise serializers.ValidationError({"url": "Hostname could not be resolved."})
+            raise serializers.ValidationError(
+                {"url": "Hostname could not be resolved."}
+            )

         if not ip_addresses:
-            raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
+            raise serializers.ValidationError(
+                {"url": "No IP addresses found for the hostname."}
+            )

         for addr in ip_addresses:
             ip = ipaddress.ip_address(addr[4][0])
             if ip.is_private or ip.is_loopback:
-                raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
+                raise serializers.ValidationError(
+                    {"url": "URL resolves to a blocked IP address."}
+                )

         # Additional validation for multiple request domains and their subdomains
-        request = self.context.get('request')
-        disallowed_domains = ['plane.so',]  # Add your disallowed domains here
+        request = self.context.get("request")
+        disallowed_domains = [
+            "plane.so",
+        ]  # Add your disallowed domains here
         if request:
-            request_host = request.get_host().split(':')[0]  # Remove port if present
+            request_host = request.get_host().split(":")[
+                0
+            ]  # Remove port if present
             disallowed_domains.append(request_host)

         # Check if hostname is a subdomain or exact match of any disallowed domain
-        if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
-            raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})
+        if any(
+            hostname == domain or hostname.endswith("." + domain)
+            for domain in disallowed_domains
+        ):
+            raise serializers.ValidationError(
+                {"url": "URL domain or its subdomain is not allowed."}
+            )

         return Webhook.objects.create(**validated_data)
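Both create() and update() end with the same disallowed-domain test. The endswith("." + domain) form matters: it matches real subdomains but not look-alike hosts. A runnable sketch of just that predicate (the helper name is illustrative):

    def is_disallowed(hostname: str, disallowed: list[str]) -> bool:
        # exact match, or a genuine subdomain of a disallowed domain
        return any(
            hostname == domain or hostname.endswith("." + domain)
            for domain in disallowed
        )

    assert is_disallowed("plane.so", ["plane.so"])
    assert is_disallowed("api.plane.so", ["plane.so"])
    assert not is_disallowed("notplane.so", ["plane.so"])  # no false positive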
@@ -56,32 +73,49 @@ class WebhookSerializer(DynamicBaseSerializer):
         # Extract the hostname from the URL
         hostname = urlparse(url).hostname
         if not hostname:
-            raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
+            raise serializers.ValidationError(
+                {"url": "Invalid URL: No hostname found."}
+            )

         # Resolve the hostname to IP addresses
         try:
             ip_addresses = socket.getaddrinfo(hostname, None)
         except socket.gaierror:
-            raise serializers.ValidationError({"url": "Hostname could not be resolved."})
+            raise serializers.ValidationError(
+                {"url": "Hostname could not be resolved."}
+            )

         if not ip_addresses:
-            raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
+            raise serializers.ValidationError(
+                {"url": "No IP addresses found for the hostname."}
+            )

         for addr in ip_addresses:
             ip = ipaddress.ip_address(addr[4][0])
             if ip.is_private or ip.is_loopback:
-                raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
+                raise serializers.ValidationError(
+                    {"url": "URL resolves to a blocked IP address."}
+                )

         # Additional validation for multiple request domains and their subdomains
-        request = self.context.get('request')
-        disallowed_domains = ['plane.so',]  # Add your disallowed domains here
+        request = self.context.get("request")
+        disallowed_domains = [
+            "plane.so",
+        ]  # Add your disallowed domains here
         if request:
-            request_host = request.get_host().split(':')[0]  # Remove port if present
+            request_host = request.get_host().split(":")[
+                0
+            ]  # Remove port if present
             disallowed_domains.append(request_host)

         # Check if hostname is a subdomain or exact match of any disallowed domain
-        if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
-            raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})
+        if any(
+            hostname == domain or hostname.endswith("." + domain)
+            for domain in disallowed_domains
+        ):
+            raise serializers.ValidationError(
+                {"url": "URL domain or its subdomain is not allowed."}
+            )

         return super().update(instance, validated_data)
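The hostname and IP checks above are duplicated verbatim in create() and update(); together they amount to a basic SSRF guard. A standalone, framework-free sketch of the shared logic (the function name and plain ValueError in place of serializers.ValidationError are illustrative):

    import ipaddress
    import socket
    from urllib.parse import urlparse

    def assert_safe_webhook_target(url: str) -> None:
        hostname = urlparse(url).hostname
        if not hostname:
            raise ValueError("Invalid URL: No hostname found.")
        try:
            ip_addresses = socket.getaddrinfo(hostname, None)
        except socket.gaierror:
            raise ValueError("Hostname could not be resolved.")
        if not ip_addresses:
            raise ValueError("No IP addresses found for the hostname.")
        for addr in ip_addresses:
            # addr[4] is the sockaddr tuple; its first item is the IP string
            ip = ipaddress.ip_address(addr[4][0])
            if ip.is_private or ip.is_loopback:
                raise ValueError("URL resolves to a blocked IP address.")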
@@ -95,12 +129,7 @@ class WebhookSerializer(DynamicBaseSerializer):


 class WebhookLogSerializer(DynamicBaseSerializer):
-
     class Meta:
         model = WebhookLog
         fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "webhook"
-        ]
+        read_only_fields = ["workspace", "webhook"]
@@ -2,7 +2,7 @@
 from rest_framework import serializers

 # Module imports
-from .base import BaseSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
 from .user import UserLiteSerializer, UserAdminLiteSerializer

 from plane.db.models import (

@@ -13,10 +13,11 @@ from plane.db.models import (
     TeamMember,
     WorkspaceMemberInvite,
     WorkspaceTheme,
+    WorkspaceUserProperties,
 )


-class WorkSpaceSerializer(BaseSerializer):
+class WorkSpaceSerializer(DynamicBaseSerializer):
     owner = UserLiteSerializer(read_only=True)
     total_members = serializers.IntegerField(read_only=True)
     total_issues = serializers.IntegerField(read_only=True)

@@ -50,6 +51,7 @@ class WorkSpaceSerializer(BaseSerializer):
             "owner",
         ]

+
 class WorkspaceLiteSerializer(BaseSerializer):
     class Meta:
         model = Workspace

@@ -61,8 +63,7 @@ class WorkspaceLiteSerializer(BaseSerializer):
         read_only_fields = fields


-
-class WorkSpaceMemberSerializer(BaseSerializer):
+class WorkSpaceMemberSerializer(DynamicBaseSerializer):
     member = UserLiteSerializer(read_only=True)
     workspace = WorkspaceLiteSerializer(read_only=True)

@@ -72,13 +73,12 @@ class WorkSpaceMemberSerializer(BaseSerializer):


 class WorkspaceMemberMeSerializer(BaseSerializer):
-
     class Meta:
         model = WorkspaceMember
         fields = "__all__"


-class WorkspaceMemberAdminSerializer(BaseSerializer):
+class WorkspaceMemberAdminSerializer(DynamicBaseSerializer):
     member = UserAdminLiteSerializer(read_only=True)
     workspace = WorkspaceLiteSerializer(read_only=True)

@@ -108,7 +108,9 @@ class WorkSpaceMemberInviteSerializer(BaseSerializer):


 class TeamSerializer(BaseSerializer):
-    members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
+    members_detail = UserLiteSerializer(
+        read_only=True, source="members", many=True
+    )
     members = serializers.ListField(
         child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
         write_only=True,

@@ -145,7 +147,9 @@ class TeamSerializer(BaseSerializer):
             members = validated_data.pop("members")
             TeamMember.objects.filter(team=instance).delete()
             team_members = [
-                TeamMember(member=member, team=instance, workspace=instance.workspace)
+                TeamMember(
+                    member=member, team=instance, workspace=instance.workspace
+                )
                 for member in members
             ]
             TeamMember.objects.bulk_create(team_members, batch_size=10)

@@ -161,3 +165,13 @@ class WorkspaceThemeSerializer(BaseSerializer):
             "workspace",
             "actor",
         ]
+
+
+class WorkspaceUserPropertiesSerializer(BaseSerializer):
+    class Meta:
+        model = WorkspaceUserProperties
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "user",
+        ]
@@ -3,11 +3,10 @@ from .asset import urlpatterns as asset_urls
 from .authentication import urlpatterns as authentication_urls
 from .config import urlpatterns as configuration_urls
 from .cycle import urlpatterns as cycle_urls
+from .dashboard import urlpatterns as dashboard_urls
 from .estimate import urlpatterns as estimate_urls
 from .external import urlpatterns as external_urls
-from .importer import urlpatterns as importer_urls
 from .inbox import urlpatterns as inbox_urls
-from .integration import urlpatterns as integration_urls
 from .issue import urlpatterns as issue_urls
 from .module import urlpatterns as module_urls
 from .notification import urlpatterns as notification_urls

@@ -28,11 +27,10 @@ urlpatterns = [
     *authentication_urls,
     *configuration_urls,
     *cycle_urls,
+    *dashboard_urls,
     *estimate_urls,
     *external_urls,
-    *importer_urls,
     *inbox_urls,
-    *integration_urls,
     *issue_urls,
     *module_urls,
     *notification_urls,

@@ -31,8 +31,14 @@ urlpatterns = [
     path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
     path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
     # magic sign in
-    path("magic-generate/", MagicGenerateEndpoint.as_view(), name="magic-generate"),
-    path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
+    path(
+        "magic-generate/",
+        MagicGenerateEndpoint.as_view(),
+        name="magic-generate",
+    ),
+    path(
+        "magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"
+    ),
     path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
     # Password Manipulation
     path(

@@ -52,6 +58,8 @@ urlpatterns = [
     ),
     # API Tokens
     path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
-    path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
+    path(
+        "api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"
+    ),
     ## End API Tokens
 ]
@@ -1,7 +1,7 @@
 from django.urls import path


-from plane.app.views import ConfigurationEndpoint
+from plane.app.views import ConfigurationEndpoint, MobileConfigurationEndpoint

 urlpatterns = [
     path(

@@ -9,4 +9,9 @@ urlpatterns = [
         ConfigurationEndpoint.as_view(),
         name="configuration",
     ),
+    path(
+        "mobile-configs/",
+        MobileConfigurationEndpoint.as_view(),
+        name="configuration",
+    ),
 ]
@@ -7,6 +7,8 @@ from plane.app.views import (
     CycleDateCheckEndpoint,
     CycleFavoriteViewSet,
     TransferCycleIssueEndpoint,
+    CycleUserPropertiesEndpoint,
+    CycleArchiveUnarchiveEndpoint,
 )


@@ -44,7 +46,7 @@ urlpatterns = [
         name="project-issue-cycle",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
         CycleIssueViewSet.as_view(
             {
                 "get": "retrieve",

@@ -84,4 +86,19 @@ urlpatterns = [
         TransferCycleIssueEndpoint.as_view(),
         name="transfer-issues",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/user-properties/",
+        CycleUserPropertiesEndpoint.as_view(),
+        name="cycle-user-filters",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
+        CycleArchiveUnarchiveEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
+        CycleArchiveUnarchiveEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
 ]
apiserver/plane/app/urls/dashboard.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+from django.urls import path
+
+
+from plane.app.views import DashboardEndpoint, WidgetsEndpoint
+
+
+urlpatterns = [
+    path(
+        "workspaces/<str:slug>/dashboard/",
+        DashboardEndpoint.as_view(),
+        name="dashboard",
+    ),
+    path(
+        "workspaces/<str:slug>/dashboard/<uuid:dashboard_id>/",
+        DashboardEndpoint.as_view(),
+        name="dashboard",
+    ),
+    path(
+        "dashboard/<uuid:dashboard_id>/widgets/<uuid:widget_id>/",
+        WidgetsEndpoint.as_view(),
+        name="widgets",
+    ),
+]
@@ -2,7 +2,6 @@ from django.urls import path


 from plane.app.views import UnsplashEndpoint
-from plane.app.views import ReleaseNotesEndpoint
 from plane.app.views import GPTIntegrationEndpoint


@@ -12,11 +11,6 @@ urlpatterns = [
         UnsplashEndpoint.as_view(),
         name="unsplash",
     ),
-    path(
-        "release-notes/",
-        ReleaseNotesEndpoint.as_view(),
-        name="release-notes",
-    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
         GPTIntegrationEndpoint.as_view(),
@@ -1,37 +0,0 @@
-from django.urls import path
-
-
-from plane.app.views import (
-    ServiceIssueImportSummaryEndpoint,
-    ImportServiceEndpoint,
-    UpdateServiceImportStatusEndpoint,
-)
-
-
-urlpatterns = [
-    path(
-        "workspaces/<str:slug>/importers/<str:service>/",
-        ServiceIssueImportSummaryEndpoint.as_view(),
-        name="importer-summary",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/importers/<str:service>/",
-        ImportServiceEndpoint.as_view(),
-        name="importer",
-    ),
-    path(
-        "workspaces/<str:slug>/importers/",
-        ImportServiceEndpoint.as_view(),
-        name="importer",
-    ),
-    path(
-        "workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
-        ImportServiceEndpoint.as_view(),
-        name="importer",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
-        UpdateServiceImportStatusEndpoint.as_view(),
-        name="importer-status",
-    ),
-]
@@ -30,7 +30,7 @@ urlpatterns = [
         name="inbox",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
         InboxIssueViewSet.as_view(
             {
                 "get": "list",
@@ -40,7 +40,7 @@ urlpatterns = [
         name="inbox-issue",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
         InboxIssueViewSet.as_view(
             {
                 "get": "retrieve",
@@ -1,150 +0,0 @@
-from django.urls import path
-
-
-from plane.app.views import (
-    IntegrationViewSet,
-    WorkspaceIntegrationViewSet,
-    GithubRepositoriesEndpoint,
-    GithubRepositorySyncViewSet,
-    GithubIssueSyncViewSet,
-    GithubCommentSyncViewSet,
-    BulkCreateGithubIssueSyncEndpoint,
-    SlackProjectSyncViewSet,
-)
-
-
-urlpatterns = [
-    path(
-        "integrations/",
-        IntegrationViewSet.as_view(
-            {
-                "get": "list",
-                "post": "create",
-            }
-        ),
-        name="integrations",
-    ),
-    path(
-        "integrations/<uuid:pk>/",
-        IntegrationViewSet.as_view(
-            {
-                "get": "retrieve",
-                "patch": "partial_update",
-                "delete": "destroy",
-            }
-        ),
-        name="integrations",
-    ),
-    path(
-        "workspaces/<str:slug>/workspace-integrations/",
-        WorkspaceIntegrationViewSet.as_view(
-            {
-                "get": "list",
-            }
-        ),
-        name="workspace-integrations",
-    ),
-    path(
-        "workspaces/<str:slug>/workspace-integrations/<str:provider>/",
-        WorkspaceIntegrationViewSet.as_view(
-            {
-                "post": "create",
-            }
-        ),
-        name="workspace-integrations",
-    ),
-    path(
-        "workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
-        WorkspaceIntegrationViewSet.as_view(
-            {
-                "get": "retrieve",
-                "delete": "destroy",
-            }
-        ),
-        name="workspace-integrations",
-    ),
-    # Github Integrations
-    path(
-        "workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
-        GithubRepositoriesEndpoint.as_view(),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
-        GithubRepositorySyncViewSet.as_view(
-            {
-                "get": "list",
-                "post": "create",
-            }
-        ),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
-        GithubRepositorySyncViewSet.as_view(
-            {
-                "get": "retrieve",
-                "delete": "destroy",
-            }
-        ),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
-        GithubIssueSyncViewSet.as_view(
-            {
-                "post": "create",
-                "get": "list",
-            }
-        ),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
-        BulkCreateGithubIssueSyncEndpoint.as_view(),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
-        GithubIssueSyncViewSet.as_view(
-            {
-                "get": "retrieve",
-                "delete": "destroy",
-            }
-        ),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
-        GithubCommentSyncViewSet.as_view(
-            {
-                "post": "create",
-                "get": "list",
-            }
-        ),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
-        GithubCommentSyncViewSet.as_view(
-            {
-                "get": "retrieve",
-                "delete": "destroy",
-            }
-        ),
-    ),
-    ## End Github Integrations
-    # Slack Integration
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
-        SlackProjectSyncViewSet.as_view(
-            {
-                "post": "create",
-                "get": "list",
-            }
-        ),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
-        SlackProjectSyncViewSet.as_view(
-            {
-                "delete": "destroy",
-                "get": "retrieve",
-            }
-        ),
-    ),
-    ## End Slack Integration
-]
@@ -1,30 +1,32 @@
 from django.urls import path
 
 
 from plane.app.views import (
-    IssueViewSet,
-    LabelViewSet,
     BulkCreateIssueLabelsEndpoint,
     BulkDeleteIssuesEndpoint,
-    BulkImportIssuesEndpoint,
-    UserWorkSpaceIssues,
     SubIssuesEndpoint,
     IssueLinkViewSet,
     IssueAttachmentEndpoint,
+    CommentReactionViewSet,
     ExportIssuesEndpoint,
     IssueActivityEndpoint,
-    IssueCommentViewSet,
-    IssueSubscriberViewSet,
-    IssueReactionViewSet,
-    CommentReactionViewSet,
-    IssueUserDisplayPropertyEndpoint,
     IssueArchiveViewSet,
-    IssueRelationViewSet,
+    IssueCommentViewSet,
     IssueDraftViewSet,
+    IssueListEndpoint,
+    IssueReactionViewSet,
+    IssueRelationViewSet,
+    IssueSubscriberViewSet,
+    IssueUserDisplayPropertyEndpoint,
+    IssueViewSet,
+    LabelViewSet,
 )
 
 
 urlpatterns = [
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/list/",
+        IssueListEndpoint.as_view(),
+        name="project-issue",
+    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
         IssueViewSet.as_view(
@@ -79,16 +81,7 @@ urlpatterns = [
         BulkDeleteIssuesEndpoint.as_view(),
         name="project-issues-bulk",
     ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
-        BulkImportIssuesEndpoint.as_view(),
-        name="project-issues-bulk",
-    ),
-    path(
-        "workspaces/<str:slug>/my-issues/",
-        UserWorkSpaceIssues.as_view(),
-        name="workspace-issues",
-    ),
+    ##
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
         SubIssuesEndpoint.as_view(),
@@ -235,7 +228,7 @@ urlpatterns = [
     ## End Comment Reactions
     ## IssueProperty
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/user-properties/",
         IssueUserDisplayPropertyEndpoint.as_view(),
         name="project-issue-display-properties",
     ),
@@ -251,23 +244,15 @@ urlpatterns = [
         name="project-issue-archive",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/archive/",
         IssueArchiveViewSet.as_view(
             {
                 "get": "retrieve",
-                "delete": "destroy",
+                "post": "archive",
+                "delete": "unarchive",
             }
         ),
-        name="project-issue-archive",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
-        IssueArchiveViewSet.as_view(
-            {
-                "post": "unarchive",
-            }
-        ),
-        name="project-issue-archive",
+        name="project-issue-archive-unarchive",
     ),
     ## End Issue Archives
     ## Issue Relation
@@ -275,16 +260,17 @@ urlpatterns = [
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
         IssueRelationViewSet.as_view(
             {
+                "get": "list",
                 "post": "create",
             }
         ),
         name="issue-relation",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/remove-relation/",
         IssueRelationViewSet.as_view(
             {
-                "delete": "destroy",
+                "post": "remove_relation",
             }
         ),
         name="issue-relation",
@@ -6,7 +6,8 @@ from plane.app.views import (
     ModuleIssueViewSet,
     ModuleLinkViewSet,
     ModuleFavoriteViewSet,
-    BulkImportModulesEndpoint,
+    ModuleUserPropertiesEndpoint,
+    ModuleArchiveUnarchiveEndpoint,
 )
 
 
@@ -34,17 +35,26 @@ urlpatterns = [
         name="project-modules",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/modules/",
         ModuleIssueViewSet.as_view(
             {
+                "post": "create_issue_modules",
+            }
+        ),
+        name="issue-module",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/issues/",
+        ModuleIssueViewSet.as_view(
+            {
+                "post": "create_module_issues",
                 "get": "list",
-                "post": "create",
             }
         ),
         name="project-module-issues",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/issues/<uuid:issue_id>/",
         ModuleIssueViewSet.as_view(
             {
                 "get": "retrieve",
@@ -97,8 +107,18 @@ urlpatterns = [
         name="user-favorite-module",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
-        BulkImportModulesEndpoint.as_view(),
-        name="bulk-modules-create",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/user-properties/",
+        ModuleUserPropertiesEndpoint.as_view(),
+        name="cycle-user-filters",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/archive/",
+        ModuleArchiveUnarchiveEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
+        ModuleArchiveUnarchiveEndpoint.as_view(),
+        name="module-archive-unarchive",
     ),
 ]
@@ -5,6 +5,7 @@ from plane.app.views import (
     NotificationViewSet,
     UnreadNotificationEndpoint,
     MarkAllReadNotificationViewSet,
+    UserNotificationPreferenceEndpoint,
 )
 
 
@@ -63,4 +64,9 @@ urlpatterns = [
         ),
         name="mark-all-read-notifications",
     ),
+    path(
+        "users/me/notification-preferences/",
+        UserNotificationPreferenceEndpoint.as_view(),
+        name="user-notification-preferences",
+    ),
 ]
@@ -14,6 +14,7 @@ from plane.app.views import (
     ProjectPublicCoverImagesEndpoint,
     ProjectDeployBoardViewSet,
     UserProjectRolesEndpoint,
+    ProjectArchiveUnarchiveEndpoint,
 )
 
 
@@ -175,4 +176,9 @@ urlpatterns = [
         ),
         name="project-deploy-board",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
+        ProjectArchiveUnarchiveEndpoint.as_view(),
+        name="project-archive-unarchive",
+    ),
 ]
@@ -18,6 +18,13 @@ from plane.app.views import (
     WorkspaceUserProfileEndpoint,
     WorkspaceUserProfileIssuesEndpoint,
     WorkspaceLabelsEndpoint,
+    WorkspaceProjectMemberEndpoint,
+    WorkspaceUserPropertiesEndpoint,
+    WorkspaceStatesEndpoint,
+    WorkspaceEstimatesEndpoint,
+    ExportWorkspaceUserActivityEndpoint,
+    WorkspaceModulesEndpoint,
+    WorkspaceCyclesEndpoint,
 )
 
 
@@ -92,6 +99,11 @@ urlpatterns = [
         WorkSpaceMemberViewSet.as_view({"get": "list"}),
         name="workspace-member",
     ),
+    path(
+        "workspaces/<str:slug>/project-members/",
+        WorkspaceProjectMemberEndpoint.as_view(),
+        name="workspace-member-roles",
+    ),
     path(
         "workspaces/<str:slug>/members/<uuid:pk>/",
         WorkSpaceMemberViewSet.as_view(
@@ -180,6 +192,11 @@ urlpatterns = [
         WorkspaceUserActivityEndpoint.as_view(),
         name="workspace-user-activity",
     ),
+    path(
+        "workspaces/<str:slug>/user-activity/<uuid:user_id>/export/",
+        ExportWorkspaceUserActivityEndpoint.as_view(),
+        name="export-workspace-user-activity",
+    ),
     path(
         "workspaces/<str:slug>/user-profile/<uuid:user_id>/",
         WorkspaceUserProfileEndpoint.as_view(),
@@ -195,4 +212,29 @@ urlpatterns = [
         WorkspaceLabelsEndpoint.as_view(),
         name="workspace-labels",
     ),
+    path(
+        "workspaces/<str:slug>/user-properties/",
+        WorkspaceUserPropertiesEndpoint.as_view(),
+        name="workspace-user-filters",
+    ),
+    path(
+        "workspaces/<str:slug>/states/",
+        WorkspaceStatesEndpoint.as_view(),
+        name="workspace-state",
+    ),
+    path(
+        "workspaces/<str:slug>/estimates/",
+        WorkspaceEstimatesEndpoint.as_view(),
+        name="workspace-estimate",
+    ),
+    path(
+        "workspaces/<str:slug>/modules/",
+        WorkspaceModulesEndpoint.as_view(),
+        name="workspace-modules",
+    ),
+    path(
+        "workspaces/<str:slug>/cycles/",
+        WorkspaceCyclesEndpoint.as_view(),
+        name="workspace-cycles",
+    ),
 ]
File diff suppressed because it is too large
@@ -1,19 +1,27 @@
-from .project import (
+from .project.base import (
     ProjectViewSet,
-    ProjectMemberViewSet,
-    UserProjectInvitationsViewset,
-    ProjectInvitationsViewset,
-    AddTeamToProjectEndpoint,
     ProjectIdentifierEndpoint,
-    ProjectJoinEndpoint,
     ProjectUserViewsEndpoint,
-    ProjectMemberUserEndpoint,
     ProjectFavoritesViewSet,
     ProjectPublicCoverImagesEndpoint,
     ProjectDeployBoardViewSet,
+    ProjectArchiveUnarchiveEndpoint,
+)
+
+from .project.invite import (
+    UserProjectInvitationsViewset,
+    ProjectInvitationsViewset,
+    ProjectJoinEndpoint,
+)
+
+from .project.member import (
+    ProjectMemberViewSet,
+    AddTeamToProjectEndpoint,
+    ProjectMemberUserEndpoint,
     UserProjectRolesEndpoint,
 )
-from .user import (
+
+from .user.base import (
     UserEndpoint,
     UpdateUserOnBoardedEndpoint,
     UpdateUserTourCompletedEndpoint,
@@ -24,62 +32,122 @@ from .oauth import OauthEndpoint
 
 from .base import BaseAPIView, BaseViewSet, WebhookMixin
 
-from .workspace import (
+from .workspace.base import (
     WorkSpaceViewSet,
     UserWorkSpacesEndpoint,
     WorkSpaceAvailabilityCheckEndpoint,
-    WorkspaceJoinEndpoint,
-    WorkSpaceMemberViewSet,
-    TeamMemberViewSet,
-    WorkspaceInvitationsViewset,
-    UserWorkspaceInvitationsViewSet,
-    UserLastProjectWithWorkspaceEndpoint,
-    WorkspaceMemberUserEndpoint,
-    WorkspaceMemberUserViewsEndpoint,
-    UserActivityGraphEndpoint,
-    UserIssueCompletedGraphEndpoint,
     UserWorkspaceDashboardEndpoint,
     WorkspaceThemeViewSet,
-    WorkspaceUserProfileStatsEndpoint,
-    WorkspaceUserActivityEndpoint,
-    WorkspaceUserProfileEndpoint,
-    WorkspaceUserProfileIssuesEndpoint,
+    ExportWorkspaceUserActivityEndpoint
+)
+
+from .workspace.member import (
+    WorkSpaceMemberViewSet,
+    TeamMemberViewSet,
+    WorkspaceMemberUserEndpoint,
+    WorkspaceProjectMemberEndpoint,
+    WorkspaceMemberUserViewsEndpoint,
+)
+from .workspace.invite import (
+    WorkspaceInvitationsViewset,
+    WorkspaceJoinEndpoint,
+    UserWorkspaceInvitationsViewSet,
+)
+from .workspace.label import (
     WorkspaceLabelsEndpoint,
 )
-from .state import StateViewSet
-from .view import (
+from .workspace.state import (
+    WorkspaceStatesEndpoint,
+)
+from .workspace.user import (
+    UserLastProjectWithWorkspaceEndpoint,
+    WorkspaceUserProfileIssuesEndpoint,
+    WorkspaceUserPropertiesEndpoint,
+    WorkspaceUserProfileEndpoint,
+    WorkspaceUserActivityEndpoint,
+    WorkspaceUserProfileStatsEndpoint,
+    UserActivityGraphEndpoint,
+    UserIssueCompletedGraphEndpoint,
+)
+from .workspace.estimate import (
+    WorkspaceEstimatesEndpoint,
+)
+from .workspace.module import (
+    WorkspaceModulesEndpoint,
+)
+from .workspace.cycle import (
+    WorkspaceCyclesEndpoint,
+)
+
+from .state.base import StateViewSet
+from .view.base import (
     GlobalViewViewSet,
     GlobalViewIssuesViewSet,
     IssueViewViewSet,
     IssueViewFavoriteViewSet,
 )
-from .cycle import (
+from .cycle.base import (
     CycleViewSet,
-    CycleIssueViewSet,
     CycleDateCheckEndpoint,
     CycleFavoriteViewSet,
     TransferCycleIssueEndpoint,
+    CycleArchiveUnarchiveEndpoint,
+    CycleUserPropertiesEndpoint,
 )
-from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
-from .issue import (
+from .cycle.issue import (
+    CycleIssueViewSet,
+)
+
+from .asset.base import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
+from .issue.base import (
+    IssueListEndpoint,
     IssueViewSet,
-    WorkSpaceIssuesEndpoint,
-    IssueActivityEndpoint,
-    IssueCommentViewSet,
     IssueUserDisplayPropertyEndpoint,
-    LabelViewSet,
     BulkDeleteIssuesEndpoint,
-    UserWorkSpaceIssues,
-    SubIssuesEndpoint,
-    IssueLinkViewSet,
-    BulkCreateIssueLabelsEndpoint,
-    IssueAttachmentEndpoint,
+)
+
+from .issue.activity import (
+    IssueActivityEndpoint,
+)
+
+from .issue.archive import (
     IssueArchiveViewSet,
-    IssueSubscriberViewSet,
+)
+
+from .issue.attachment import (
+    IssueAttachmentEndpoint,
+)
+
+from .issue.comment import (
+    IssueCommentViewSet,
     CommentReactionViewSet,
-    IssueReactionViewSet,
+)
+
+from .issue.draft import IssueDraftViewSet
+
+from .issue.label import (
+    LabelViewSet,
+    BulkCreateIssueLabelsEndpoint,
+)
+
+from .issue.link import (
+    IssueLinkViewSet,
+)
+
+from .issue.relation import (
     IssueRelationViewSet,
-    IssueDraftViewSet,
+)
+
+from .issue.reaction import (
+    IssueReactionViewSet,
+)
+
+from .issue.sub_issue import (
+    SubIssuesEndpoint,
+)
+
+from .issue.subscriber import (
+    IssueSubscriberViewSet,
 )
 
 from .auth_extended import (
@@ -98,35 +166,22 @@ from .authentication import (
     MagicSignInEndpoint,
 )
 
-from .module import (
+from .module.base import (
     ModuleViewSet,
-    ModuleIssueViewSet,
     ModuleLinkViewSet,
     ModuleFavoriteViewSet,
+    ModuleArchiveUnarchiveEndpoint,
+    ModuleUserPropertiesEndpoint,
+)
+
+from .module.issue import (
+    ModuleIssueViewSet,
 )
 
 from .api import ApiTokenEndpoint
 
-from .integration import (
-    WorkspaceIntegrationViewSet,
-    IntegrationViewSet,
-    GithubIssueSyncViewSet,
-    GithubRepositorySyncViewSet,
-    GithubCommentSyncViewSet,
-    GithubRepositoriesEndpoint,
-    BulkCreateGithubIssueSyncEndpoint,
-    SlackProjectSyncViewSet,
-)
-
-from .importer import (
-    ServiceIssueImportSummaryEndpoint,
-    ImportServiceEndpoint,
-    UpdateServiceImportStatusEndpoint,
-    BulkImportIssuesEndpoint,
-    BulkImportModulesEndpoint,
-)
-
-from .page import (
+from .page.base import (
     PageViewSet,
     PageFavoriteViewSet,
     PageLogEndpoint,
@@ -136,16 +191,19 @@ from .page import (
 from .search import GlobalSearchEndpoint, IssueSearchEndpoint
 
 
-from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint
+from .external.base import (
+    GPTIntegrationEndpoint,
+    UnsplashEndpoint,
+)
 
-from .estimate import (
+from .estimate.base import (
     ProjectEstimatePointEndpoint,
     BulkEstimatePointEndpoint,
 )
 
-from .inbox import InboxViewSet, InboxIssueViewSet
+from .inbox.base import InboxViewSet, InboxIssueViewSet
 
-from .analytic import (
+from .analytic.base import (
     AnalyticsEndpoint,
     AnalyticViewViewset,
     SavedAnalyticEndpoint,
@@ -153,18 +211,23 @@ from .analytic import (
     DefaultAnalyticsEndpoint,
 )
 
-from .notification import (
+from .notification.base import (
     NotificationViewSet,
     UnreadNotificationEndpoint,
     MarkAllReadNotificationViewSet,
+    UserNotificationPreferenceEndpoint,
 )
 
-from .exporter import ExportIssuesEndpoint
+from .exporter.base import ExportIssuesEndpoint
 
-from .config import ConfigurationEndpoint
+from .config import ConfigurationEndpoint, MobileConfigurationEndpoint
 
-from .webhook import (
+from .webhook.base import (
     WebhookEndpoint,
     WebhookLogsEndpoint,
     WebhookSecretRegenerateEndpoint,
 )
+
+from .dashboard.base import DashboardEndpoint, WidgetsEndpoint
+
+from .error_404 import custom_404_view
@@ -1,6 +1,7 @@
 # Django imports
-from django.db.models import Count, Sum, F, Q
+from django.db.models import Count, Sum, F
 from django.db.models.functions import ExtractMonth
+from django.utils import timezone
 
 # Third party imports
 from rest_framework import status
@@ -9,7 +10,7 @@ from rest_framework.response import Response
 # Module imports
 from plane.app.views import BaseAPIView, BaseViewSet
 from plane.app.permissions import WorkSpaceAdminPermission
-from plane.db.models import Issue, AnalyticView, Workspace, State, Label
+from plane.db.models import Issue, AnalyticView, Workspace
 from plane.app.serializers import AnalyticViewSerializer
 from plane.utils.analytics_plot import build_graph_plot
 from plane.bgtasks.analytic_plot_export import analytic_export_task
@@ -50,8 +51,8 @@ class AnalyticsEndpoint(BaseAPIView):
         if (
             not x_axis
             or not y_axis
-            or not x_axis in valid_xaxis_segment
-            or not y_axis in valid_yaxis
+            or x_axis not in valid_xaxis_segment
+            or y_axis not in valid_yaxis
         ):
             return Response(
                 {
@@ -61,7 +62,9 @@ class AnalyticsEndpoint(BaseAPIView):
             )
 
         # If segment is present it cannot be same as x-axis
-        if segment and (segment not in valid_xaxis_segment or x_axis == segment):
+        if segment and (
+            segment not in valid_xaxis_segment or x_axis == segment
+        ):
             return Response(
                 {
                     "error": "Both segment and x axis cannot be same and segment should be valid"
@@ -110,7 +113,9 @@ class AnalyticsEndpoint(BaseAPIView):
         if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
             assignee_details = (
                 Issue.issue_objects.filter(
-                    workspace__slug=slug, **filters, assignees__avatar__isnull=False
+                    workspace__slug=slug,
+                    **filters,
+                    assignees__avatar__isnull=False,
                 )
                 .order_by("assignees__id")
                 .distinct("assignees__id")
@@ -124,7 +129,9 @@ class AnalyticsEndpoint(BaseAPIView):
             )
 
         cycle_details = {}
-        if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]:
+        if x_axis in ["issue_cycle__cycle_id"] or segment in [
+            "issue_cycle__cycle_id"
+        ]:
             cycle_details = (
                 Issue.issue_objects.filter(
                     workspace__slug=slug,
@@ -186,7 +193,9 @@ class AnalyticViewViewset(BaseViewSet):
 
     def get_queryset(self):
        return self.filter_queryset(
-            super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
+            super()
+            .get_queryset()
+            .filter(workspace__slug=self.kwargs.get("slug"))
        )
 
 
@@ -196,7 +205,9 @@ class SavedAnalyticEndpoint(BaseAPIView):
     ]
 
     def get(self, request, slug, analytic_id):
-        analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug)
+        analytic_view = AnalyticView.objects.get(
+            pk=analytic_id, workspace__slug=slug
+        )
 
         filter = analytic_view.query
         queryset = Issue.issue_objects.filter(**filter)
@@ -255,8 +266,8 @@ class ExportAnalyticsEndpoint(BaseAPIView):
         if (
             not x_axis
             or not y_axis
-            or not x_axis in valid_xaxis_segment
-            or not y_axis in valid_yaxis
+            or x_axis not in valid_xaxis_segment
+            or y_axis not in valid_yaxis
         ):
             return Response(
                 {
@@ -266,7 +277,9 @@ class ExportAnalyticsEndpoint(BaseAPIView):
             )
 
         # If segment is present it cannot be same as x-axis
-        if segment and (segment not in valid_xaxis_segment or x_axis == segment):
+        if segment and (
+            segment not in valid_xaxis_segment or x_axis == segment
+        ):
             return Response(
                 {
                     "error": "Both segment and x axis cannot be same and segment should be valid"
@@ -293,7 +306,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
 
     def get(self, request, slug):
         filters = issue_filters(request.GET, "GET")
-        base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
+        base_issues = Issue.issue_objects.filter(
+            workspace__slug=slug, **filters
+        )
 
         total_issues = base_issues.count()
 
@@ -306,7 +321,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
         )
 
         open_issues_groups = ["backlog", "unstarted", "started"]
-        open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups)
+        open_issues_queryset = state_groups.filter(
+            state__group__in=open_issues_groups
+        )
 
         open_issues = open_issues_queryset.count()
         open_issues_classified = (
@@ -315,8 +332,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
             .order_by("state_group")
         )
 
+        current_year = timezone.now().year
         issue_completed_month_wise = (
-            base_issues.filter(completed_at__isnull=False)
+            base_issues.filter(completed_at__year=current_year)
             .annotate(month=ExtractMonth("completed_at"))
             .values("month")
             .annotate(count=Count("*"))
@@ -361,10 +379,12 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
             .order_by("-count")
         )
 
-        open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[
+        open_estimate_sum = open_issues_queryset.aggregate(
+            sum=Sum("estimate_point")
+        )["sum"]
+        total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))[
             "sum"
         ]
-        total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]
 
         return Response(
             {
@@ -43,7 +43,7 @@ class ApiTokenEndpoint(BaseAPIView):
         )
 
     def get(self, request, slug, pk=None):
-        if pk == None:
+        if pk is None:
             api_tokens = APIToken.objects.filter(
                 user=request.user, workspace__slug=slug
             )
@@ -71,7 +71,9 @@ class ApiTokenEndpoint(BaseAPIView):
             user=request.user,
             pk=pk,
         )
-        serializer = APITokenSerializer(api_token, data=request.data, partial=True)
+        serializer = APITokenSerializer(
+            api_token, data=request.data, partial=True
+        )
         if serializer.is_valid():
             serializer.save()
             return Response(serializer.data, status=status.HTTP_200_OK)
@@ -4,13 +4,17 @@ from rest_framework.response import Response
 from rest_framework.parsers import MultiPartParser, FormParser, JSONParser
 
 # Module imports
-from .base import BaseAPIView, BaseViewSet
+from ..base import BaseAPIView, BaseViewSet
 from plane.db.models import FileAsset, Workspace
 from plane.app.serializers import FileAssetSerializer
 
 
 class FileAssetEndpoint(BaseAPIView):
-    parser_classes = (MultiPartParser, FormParser, JSONParser,)
+    parser_classes = (
+        MultiPartParser,
+        FormParser,
+        JSONParser,
+    )
 
     """
     A viewset for viewing and editing task instances.
@@ -20,10 +24,18 @@ class FileAssetEndpoint(BaseAPIView):
         asset_key = str(workspace_id) + "/" + asset_key
         files = FileAsset.objects.filter(asset=asset_key)
         if files.exists():
-            serializer = FileAssetSerializer(files, context={"request": request}, many=True)
-            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
+            serializer = FileAssetSerializer(
+                files, context={"request": request}, many=True
+            )
+            return Response(
+                {"data": serializer.data, "status": True},
+                status=status.HTTP_200_OK,
+            )
         else:
-            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
+            return Response(
+                {"error": "Asset key does not exist", "status": False},
+                status=status.HTTP_200_OK,
+            )
 
     def post(self, request, slug):
         serializer = FileAssetSerializer(data=request.data)
@@ -43,7 +55,6 @@ class FileAssetEndpoint(BaseAPIView):
 
 
 class FileAssetViewSet(BaseViewSet):
-
     def restore(self, request, workspace_id, asset_key):
         asset_key = str(workspace_id) + "/" + asset_key
         file_asset = FileAsset.objects.get(asset=asset_key)
@@ -56,12 +67,22 @@ class UserAssetsEndpoint(BaseAPIView):
     parser_classes = (MultiPartParser, FormParser)
 
     def get(self, request, asset_key):
-        files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
+        files = FileAsset.objects.filter(
+            asset=asset_key, created_by=request.user
+        )
         if files.exists():
-            serializer = FileAssetSerializer(files, context={"request": request})
-            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
+            serializer = FileAssetSerializer(
+                files, context={"request": request}
+            )
+            return Response(
+                {"data": serializer.data, "status": True},
+                status=status.HTTP_200_OK,
+            )
         else:
-            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
+            return Response(
+                {"error": "Asset key does not exist", "status": False},
+                status=status.HTTP_200_OK,
+            )
 
     def post(self, request):
         serializer = FileAssetSerializer(data=request.data)
@@ -70,9 +91,10 @@ class UserAssetsEndpoint(BaseAPIView):
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
 
+
     def delete(self, request, asset_key):
-        file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
+        file_asset = FileAsset.objects.get(
+            asset=asset_key, created_by=request.user
+        )
         file_asset.is_deleted = True
         file_asset.save()
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -16,7 +16,6 @@ from django.contrib.auth.hashers import make_password
 from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
 from django.core.validators import validate_email
 from django.core.exceptions import ValidationError
-from django.conf import settings
 
 ## Third Party Imports
 from rest_framework import status
@@ -128,7 +127,8 @@ class ForgotPasswordEndpoint(BaseAPIView):
                 status=status.HTTP_200_OK,
             )
         return Response(
-            {"error": "Please check the email"}, status=status.HTTP_400_BAD_REQUEST
+            {"error": "Please check the email"},
+            status=status.HTTP_400_BAD_REQUEST,
         )
 
 
@@ -167,9 +167,11 @@ class ResetPasswordEndpoint(BaseAPIView):
                 }
 
                 return Response(data, status=status.HTTP_200_OK)
-            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+            return Response(
+                serializer.errors, status=status.HTTP_400_BAD_REQUEST
+            )
 
-        except DjangoUnicodeDecodeError as indentifier:
+        except DjangoUnicodeDecodeError:
             return Response(
                 {"error": "token is not valid, please check the new one"},
                 status=status.HTTP_401_UNAUTHORIZED,
@@ -191,7 +193,8 @@ class ChangePasswordEndpoint(BaseAPIView):
             user.is_password_autoset = False
             user.save()
             return Response(
-                {"message": "Password updated successfully"}, status=status.HTTP_200_OK
+                {"message": "Password updated successfully"},
+                status=status.HTTP_200_OK,
             )
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
 
@@ -213,7 +216,8 @@ class SetUserPasswordEndpoint(BaseAPIView):
         # Check password validation
         if not password and len(str(password)) < 8:
             return Response(
-                {"error": "Password is not valid"}, status=status.HTTP_400_BAD_REQUEST
+                {"error": "Password is not valid"},
+                status=status.HTTP_400_BAD_REQUEST,
             )
 
         # Set the user password
@@ -281,7 +285,9 @@ class MagicGenerateEndpoint(BaseAPIView):
 
         if data["current_attempt"] > 2:
             return Response(
-                {"error": "Max attempts exhausted. Please try again later."},
+                {
+                    "error": "Max attempts exhausted. Please try again later."
+                },
                 status=status.HTTP_400_BAD_REQUEST,
             )
 
@@ -339,7 +345,8 @@ class EmailCheckEndpoint(BaseAPIView):
 
         if not email:
             return Response(
-                {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST
+                {"error": "Email is required"},
+                status=status.HTTP_400_BAD_REQUEST,
            )
 
         # validate the email
@@ -347,7 +354,8 @@ class EmailCheckEndpoint(BaseAPIView):
             validate_email(email)
         except ValidationError:
             return Response(
-                {"error": "Email is not valid"}, status=status.HTTP_400_BAD_REQUEST
+                {"error": "Email is not valid"},
+                status=status.HTTP_400_BAD_REQUEST,
            )
 
         # Check if the user exists
@@ -392,20 +400,25 @@ class EmailCheckEndpoint(BaseAPIView):
                 email=email,
                 user_agent=request.META.get("HTTP_USER_AGENT"),
                 ip=request.META.get("REMOTE_ADDR"),
-                event_name="SIGN_IN",
-                medium="MAGIC_LINK",
+                event_name="Sign up",
+                medium="Magic link",
                 first_time=True,
             )
             key, token, current_attempt = generate_magic_token(email=email)
             if not current_attempt:
                 return Response(
-                    {"error": "Max attempts exhausted. Please try again later."},
+                    {
+                        "error": "Max attempts exhausted. Please try again later."
+                    },
                     status=status.HTTP_400_BAD_REQUEST,
                 )
             # Trigger the email
            magic_link.delay(email, "magic_" + str(email), token, current_site)
            return Response(
-                {"is_password_autoset": user.is_password_autoset, "is_existing": False},
+                {
+                    "is_password_autoset": user.is_password_autoset,
+                    "is_existing": False,
+                },
                status=status.HTTP_200_OK,
            )
 
@@ -424,8 +437,8 @@ class EmailCheckEndpoint(BaseAPIView):
                 email=email,
                 user_agent=request.META.get("HTTP_USER_AGENT"),
                 ip=request.META.get("REMOTE_ADDR"),
-                event_name="SIGN_IN",
-                medium="MAGIC_LINK",
+                event_name="Sign in",
+                medium="Magic link",
                 first_time=False,
             )
 
@@ -433,7 +446,9 @@ class EmailCheckEndpoint(BaseAPIView):
             key, token, current_attempt = generate_magic_token(email=email)
             if not current_attempt:
                 return Response(
-                    {"error": "Max attempts exhausted. Please try again later."},
+                    {
+                        "error": "Max attempts exhausted. Please try again later."
+                    },
                     status=status.HTTP_400_BAD_REQUEST,
                 )
 
@@ -452,8 +467,8 @@ class EmailCheckEndpoint(BaseAPIView):
                 email=email,
                 user_agent=request.META.get("HTTP_USER_AGENT"),
                 ip=request.META.get("REMOTE_ADDR"),
-                event_name="SIGN_IN",
-                medium="EMAIL",
+                event_name="Sign in",
+                medium="Email",
                 first_time=False,
             )
 
@ -7,7 +7,6 @@ import json
|
|||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.validators import validate_email
|
from django.core.validators import validate_email
|
||||||
from django.conf import settings
|
|
||||||
from django.contrib.auth.hashers import make_password
|
from django.contrib.auth.hashers import make_password
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
@ -65,7 +64,7 @@ class SignUpEndpoint(BaseAPIView):
|
|||||||
email = email.strip().lower()
|
email = email.strip().lower()
|
||||||
try:
|
try:
|
||||||
validate_email(email)
|
validate_email(email)
|
||||||
except ValidationError as e:
|
except ValidationError:
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Please provide a valid email address."},
|
{"error": "Please provide a valid email address."},
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
@ -73,7 +72,7 @@ class SignUpEndpoint(BaseAPIView):
|
|||||||
|
|
||||||
# get configuration values
|
# get configuration values
|
||||||
# Get configuration values
|
# Get configuration values
|
||||||
ENABLE_SIGNUP, = get_configuration_value(
|
(ENABLE_SIGNUP,) = get_configuration_value(
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"key": "ENABLE_SIGNUP",
|
"key": "ENABLE_SIGNUP",
|
||||||
@ -151,7 +150,7 @@ class SignInEndpoint(BaseAPIView):
|
|||||||
email = email.strip().lower()
|
email = email.strip().lower()
|
||||||
try:
|
try:
|
||||||
validate_email(email)
|
validate_email(email)
|
||||||
except ValidationError as e:
|
except ValidationError:
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Please provide a valid email address."},
|
{"error": "Please provide a valid email address."},
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
@ -173,7 +172,7 @@ class SignInEndpoint(BaseAPIView):
|
|||||||
|
|
||||||
# Create the user
|
# Create the user
|
||||||
else:
|
else:
|
||||||
ENABLE_SIGNUP, = get_configuration_value(
|
(ENABLE_SIGNUP,) = get_configuration_value(
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"key": "ENABLE_SIGNUP",
|
"key": "ENABLE_SIGNUP",
|
||||||
@ -238,9 +237,11 @@ class SignInEndpoint(BaseAPIView):
|
|||||||
[
|
[
|
||||||
WorkspaceMember(
|
WorkspaceMember(
|
||||||
workspace_id=project_member_invite.workspace_id,
|
workspace_id=project_member_invite.workspace_id,
|
||||||
role=project_member_invite.role
|
role=(
|
||||||
if project_member_invite.role in [5, 10, 15]
|
project_member_invite.role
|
||||||
else 15,
|
if project_member_invite.role in [5, 10, 15]
|
||||||
|
else 15
|
||||||
|
),
|
||||||
member=user,
|
member=user,
|
||||||
created_by_id=project_member_invite.created_by_id,
|
created_by_id=project_member_invite.created_by_id,
|
||||||
)
|
)
|
||||||
@ -254,9 +255,11 @@ class SignInEndpoint(BaseAPIView):
|
|||||||
[
|
[
|
||||||
ProjectMember(
|
ProjectMember(
|
||||||
workspace_id=project_member_invite.workspace_id,
|
workspace_id=project_member_invite.workspace_id,
|
||||||
role=project_member_invite.role
|
role=(
|
||||||
if project_member_invite.role in [5, 10, 15]
|
project_member_invite.role
|
||||||
else 15,
|
if project_member_invite.role in [5, 10, 15]
|
||||||
|
else 15
|
||||||
|
),
|
||||||
member=user,
|
member=user,
|
||||||
created_by_id=project_member_invite.created_by_id,
|
created_by_id=project_member_invite.created_by_id,
|
||||||
)
|
)
|
||||||
@ -274,8 +277,8 @@ class SignInEndpoint(BaseAPIView):
|
|||||||
email=email,
|
email=email,
|
||||||
user_agent=request.META.get("HTTP_USER_AGENT"),
|
user_agent=request.META.get("HTTP_USER_AGENT"),
|
||||||
ip=request.META.get("REMOTE_ADDR"),
|
ip=request.META.get("REMOTE_ADDR"),
|
||||||
event_name="SIGN_IN",
|
event_name="Sign in",
|
||||||
medium="EMAIL",
|
medium="Email",
|
||||||
first_time=False,
|
first_time=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -325,7 +328,7 @@ class MagicSignInEndpoint(BaseAPIView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
user_token = request.data.get("token", "").strip()
|
user_token = request.data.get("token", "").strip()
|
||||||
key = request.data.get("key", False).strip().lower()
|
key = request.data.get("key", "").strip().lower()
|
||||||
|
|
||||||
if not key or user_token == "":
|
if not key or user_token == "":
|
||||||
return Response(
|
return Response(
|
||||||
@ -343,21 +346,14 @@ class MagicSignInEndpoint(BaseAPIView):
|
|||||||
|
|
||||||
if str(token) == str(user_token):
|
if str(token) == str(user_token):
|
||||||
user = User.objects.get(email=email)
|
user = User.objects.get(email=email)
|
||||||
if not user.is_active:
|
|
||||||
return Response(
|
|
||||||
{
|
|
||||||
"error": "Your account has been deactivated. Please contact your site administrator."
|
|
||||||
},
|
|
||||||
status=status.HTTP_403_FORBIDDEN,
|
|
||||||
)
|
|
||||||
# Send event
|
# Send event
|
||||||
auth_events.delay(
|
auth_events.delay(
|
||||||
user=user.id,
|
user=user.id,
|
||||||
email=email,
|
email=email,
|
||||||
user_agent=request.META.get("HTTP_USER_AGENT"),
|
user_agent=request.META.get("HTTP_USER_AGENT"),
|
||||||
ip=request.META.get("REMOTE_ADDR"),
|
ip=request.META.get("REMOTE_ADDR"),
|
||||||
event_name="SIGN_IN",
|
event_name="Sign in",
|
||||||
medium="MAGIC_LINK",
|
medium="Magic link",
|
||||||
first_time=False,
|
first_time=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -371,8 +367,10 @@ class MagicSignInEndpoint(BaseAPIView):
|
|||||||
user.save()
|
user.save()
|
||||||
|
|
||||||
# Check if user has any accepted invites for workspace and add them to workspace
|
# Check if user has any accepted invites for workspace and add them to workspace
|
||||||
workspace_member_invites = WorkspaceMemberInvite.objects.filter(
|
workspace_member_invites = (
|
||||||
email=user.email, accepted=True
|
WorkspaceMemberInvite.objects.filter(
|
||||||
|
email=user.email, accepted=True
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
WorkspaceMember.objects.bulk_create(
|
WorkspaceMember.objects.bulk_create(
|
||||||
@ -397,9 +395,11 @@ class MagicSignInEndpoint(BaseAPIView):
|
|||||||
[
|
[
|
||||||
WorkspaceMember(
|
WorkspaceMember(
|
||||||
workspace_id=project_member_invite.workspace_id,
|
workspace_id=project_member_invite.workspace_id,
|
||||||
role=project_member_invite.role
|
role=(
|
||||||
if project_member_invite.role in [5, 10, 15]
|
project_member_invite.role
|
||||||
else 15,
|
if project_member_invite.role in [5, 10, 15]
|
||||||
|
else 15
|
||||||
|
),
|
||||||
member=user,
|
member=user,
|
||||||
created_by_id=project_member_invite.created_by_id,
|
created_by_id=project_member_invite.created_by_id,
|
||||||
)
|
)
|
||||||
@@ -413,9 +413,11 @@ class MagicSignInEndpoint(BaseAPIView):
                 [
                     ProjectMember(
                         workspace_id=project_member_invite.workspace_id,
-                        role=project_member_invite.role
-                        if project_member_invite.role in [5, 10, 15]
-                        else 15,
+                        role=(
+                            project_member_invite.role
+                            if project_member_invite.role in [5, 10, 15]
+                            else 15
+                        ),
                         member=user,
                         created_by_id=project_member_invite.created_by_id,
                     )
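
Both hunks above wrap the same conditional expression in parentheses (a formatter-style change; behaviour is unchanged): an invited member keeps the role they were invited with only if it is one of the known levels 5, 10, or 15, and anything else is clamped to 15. A small sketch of that rule as a standalone helper; the helper name and the role labels are ours, not Plane's:

    # Hypothetical helper capturing the clamping rule from the diff.
    VALID_ROLES = [5, 10, 15]  # role labels assumed, e.g. guest / viewer / member

    def normalize_role(invited_role: int) -> int:
        return invited_role if invited_role in VALID_ROLES else 15

    assert normalize_role(10) == 10
    assert normalize_role(20) == 15  # unknown role falls back to 15
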
@@ -438,7 +440,9 @@ class MagicSignInEndpoint(BaseAPIView):

         else:
             return Response(
-                {"error": "Your login code was incorrect. Please try again."},
+                {
+                    "error": "Your login code was incorrect. Please try again."
+                },
                 status=status.HTTP_400_BAD_REQUEST,
             )

@@ -1,30 +1,27 @@
 # Python imports
 import zoneinfo
-import json
+from django.conf import settings
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError

 # Django imports
 from django.urls import resolve
-from django.conf import settings
 from django.utils import timezone
-from django.db import IntegrityError
-from django.core.exceptions import ObjectDoesNotExist, ValidationError
-from django.core.serializers.json import DjangoJSONEncoder
+from django_filters.rest_framework import DjangoFilterBackend

 # Third part imports
 from rest_framework import status
-from rest_framework import status
-from rest_framework.viewsets import ModelViewSet
-from rest_framework.response import Response
 from rest_framework.exceptions import APIException
-from rest_framework.views import APIView
 from rest_framework.filters import SearchFilter
 from rest_framework.permissions import IsAuthenticated
-from sentry_sdk import capture_exception
-from django_filters.rest_framework import DjangoFilterBackend
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from rest_framework.viewsets import ModelViewSet

 # Module imports
-from plane.utils.paginator import BasePaginator
 from plane.bgtasks.webhook_task import send_webhook
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator


 class TimezoneMixin:
@@ -46,7 +43,9 @@ class WebhookMixin:
     bulk = False

     def finalize_response(self, request, response, *args, **kwargs):
-        response = super().finalize_response(request, response, *args, **kwargs)
+        response = super().finalize_response(
+            request, response, *args, **kwargs
+        )

         # Check for the case should webhook be sent
         if (
@@ -62,6 +61,7 @@ class WebhookMixin:
                 action=self.request.method,
                 slug=self.workspace_slug,
                 bulk=self.bulk,
+                current_site=request.META.get("HTTP_ORIGIN"),
             )

         return response
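
The new `current_site` argument forwards the request's Origin header (`HTTP_ORIGIN` in `request.META`) into the webhook task, presumably so delivered payloads can carry absolute URLs for the instance that produced them. A sketch of how a consumer might use it; this usage is assumed, not taken from Plane's code:

    # Hypothetical use of current_site inside a webhook payload builder.
    def build_event_url(current_site, workspace_slug, project_id):
        base = current_site or "http://localhost"  # the header may be absent
        return f"{base}/{workspace_slug}/projects/{project_id}/"

    print(build_event_url("https://app.example.com", "acme", "42"))
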
@@ -87,8 +87,10 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
         try:
             return self.model.objects.all()
         except Exception as e:
-            capture_exception(e)
-            raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
+            log_exception(e)
+            raise APIException(
+                "Please check the view", status.HTTP_400_BAD_REQUEST
+            )

     def handle_exception(self, exc):
         """
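
`log_exception` (imported above from `plane.utils.exception_logger`) replaces the direct `capture_exception` calls throughout this file, putting error reporting behind a single seam. Its internals are not shown in this diff; the sketch below is one plausible shape, assuming it logs locally and forwards to Sentry only when Sentry is configured:

    # Hypothetical shape of plane.utils.exception_logger.log_exception.
    import logging

    logger = logging.getLogger("plane.exceptions")

    def log_exception(e):
        logger.error("Unhandled exception", exc_info=e)  # always log locally
        # If Sentry is configured, also forward the exception:
        # from sentry_sdk import capture_exception
        # capture_exception(e)
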
@@ -99,6 +101,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
             response = super().handle_exception(exc)
             return response
         except Exception as e:
+            print(e) if settings.DEBUG else print("Server Error")
            if isinstance(e, IntegrityError):
                 return Response(
                     {"error": "The payload is not valid"},
@@ -112,23 +115,23 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
                 )

             if isinstance(e, ObjectDoesNotExist):
-                model_name = str(exc).split(" matching query does not exist.")[0]
                 return Response(
-                    {"error": f"{model_name} does not exist."},
+                    {"error": "The required object does not exist."},
                     status=status.HTTP_404_NOT_FOUND,
                 )

             if isinstance(e, KeyError):
-                capture_exception(e)
+                log_exception(e)
                 return Response(
-                    {"error": f"key {e} does not exist"},
+                    {"error": "The required key does not exist."},
                     status=status.HTTP_400_BAD_REQUEST,
                 )

-            print(e) if settings.DEBUG else print("Server Error")
-            capture_exception(e)
-            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            log_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )

     def dispatch(self, request, *args, **kwargs):
         try:
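
The dropped `model_name` line shows why the error messages became generic: Django's `DoesNotExist` message embeds the model name (for example "User matching query does not exist."), so echoing it back told any caller which model a failed lookup hit. A quick demonstration of what the old parsing extracted and returned:

    # What str(exc) looks like for a missing object, and what the old code leaked.
    exc_text = "User matching query does not exist."  # typical DoesNotExist message
    model_name = exc_text.split(" matching query does not exist.")[0]
    assert model_name == "User"  # this name used to be sent back to the client
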
@@ -159,6 +162,24 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
         if resolve(self.request.path_info).url_name == "project":
             return self.kwargs.get("pk", None)

+    @property
+    def fields(self):
+        fields = [
+            field
+            for field in self.request.GET.get("fields", "").split(",")
+            if field
+        ]
+        return fields if fields else None
+
+    @property
+    def expand(self):
+        expand = [
+            expand
+            for expand in self.request.GET.get("expand", "").split(",")
+            if expand
+        ]
+        return expand if expand else None
+

 class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
     permission_classes = [
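
The new `fields` and `expand` properties parse comma-separated query parameters, filtering out empty segments so that a trailing comma or a missing parameter yields `None` rather than a list containing an empty string. The standalone snippet below reproduces that parsing; the inputs are illustrative:

    # Reproducing the fields/expand parsing outside the view.
    def parse_csv_param(raw: str):
        values = [v for v in raw.split(",") if v]
        return values if values else None

    assert parse_csv_param("id,name,") == ["id", "name"]  # trailing comma dropped
    assert parse_csv_param("") is None                     # absent param -> None
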
@@ -201,20 +222,22 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
                 )

             if isinstance(e, ObjectDoesNotExist):
-                model_name = str(exc).split(" matching query does not exist.")[0]
                 return Response(
-                    {"error": f"{model_name} does not exist."},
+                    {"error": "The required object does not exist."},
                     status=status.HTTP_404_NOT_FOUND,
                 )

             if isinstance(e, KeyError):
-                return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
-
-            if settings.DEBUG:
-                print(e)
-            capture_exception(e)
-            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+                return Response(
+                    {"error": "The required key does not exist."},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            log_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )

     def dispatch(self, request, *args, **kwargs):
         try:
@@ -239,3 +262,21 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
     @property
     def project_id(self):
         return self.kwargs.get("project_id", None)
+
+    @property
+    def fields(self):
+        fields = [
+            field
+            for field in self.request.GET.get("fields", "").split(",")
+            if field
+        ]
+        return fields if fields else None
+
+    @property
+    def expand(self):
+        expand = [
+            expand
+            for expand in self.request.GET.get("expand", "").split(",")
+            if expand
+        ]
+        return expand if expand else None
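
With the same two properties duplicated onto `BaseAPIView`, both viewsets and plain API views can honour requests such as the following; the URL is illustrative, not a documented endpoint:

    GET /api/workspaces/acme/projects/42/issues/?fields=id,name,state&expand=assignees
    # view.fields -> ["id", "name", "state"], view.expand -> ["assignees"]
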
Some files were not shown because too many files have changed in this diff.