Mirror of https://github.com/open-webui/open-webui.git (synced 2026-03-09 23:35:09 -05:00)
Compare commits
1516 Commits
*(Commit table elided: the compare view listed 1516 commits, but only bare SHA-1 hashes survived extraction; the author, date, and message columns were empty.)*
**`.github/ISSUE_TEMPLATE/bug_report.md`** (vendored; 4 lines changed)

```
@@ -8,6 +8,10 @@ assignees: ''

# Bug Report

**Important: Before submitting a bug report, please check whether a similar issue or feature request has already been posted in the Issues or Discussions section. It's likely we're already tracking it. In case of uncertainty, initiate a discussion post first. This helps us all to efficiently focus on improving the project.**

**Let's collaborate respectfully. If you bring negativity, please understand our capacity to engage may be limited. If you're open to learning and communicating constructively, we're more than happy to assist you. Remember, Open WebUI is a volunteer-driven project maintained by a single maintainer, supported by our amazing contributors who also manage full-time jobs. We respect your time; please respect ours. If you have an issue, we highly encourage you to submit a pull request or to fork the project. We actively work to prevent contributor burnout to preserve the quality and continuity of Open WebUI.**

## Installation Method

[Describe the method you used to install the project, e.g., git clone, Docker, pip, etc.]
```
**`.github/ISSUE_TEMPLATE/feature_request.md`** (vendored; 6 lines changed)

```
@@ -6,6 +6,12 @@ labels: ''
assignees: ''
---

# Feature Request

**Important: Before submitting a feature request, please check whether a similar issue or feature request has already been posted in the Issues or Discussions section. It's likely we're already tracking it. In case of uncertainty, initiate a discussion post first. This helps us all to efficiently focus on improving the project.**

**Let's collaborate respectfully. If you bring negativity, please understand our capacity to engage may be limited. If you're open to learning and communicating constructively, we're more than happy to assist you. Remember, Open WebUI is a volunteer-driven project maintained by a single maintainer, supported by our amazing contributors who also manage full-time jobs. We respect your time; please respect ours. If you have an issue, we highly encourage you to submit a pull request or to fork the project. We actively work to prevent contributor burnout to preserve the quality and continuity of Open WebUI.**

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
```
**`.github/dependabot.yml`** (vendored; 4 lines changed)

```diff
@@ -3,10 +3,10 @@ updates:
   - package-ecosystem: pip
     directory: '/backend'
     schedule:
-      interval: weekly
+      interval: monthly
     target-branch: 'dev'
   - package-ecosystem: 'github-actions'
     directory: '/'
     schedule:
       # Check for updates to GitHub Actions every week
-      interval: 'weekly'
+      interval: monthly
```
||||
108
.github/workflows/build-release.yml
vendored
108
.github/workflows/build-release.yml
vendored
@@ -10,61 +10,63 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Check for changes in package.json
|
||||
run: |
|
||||
git diff --cached --diff-filter=d package.json || {
|
||||
echo "No changes to package.json"
|
||||
exit 1
|
||||
}
|
||||
|
||||
- name: Get version number from package.json
|
||||
id: get_version
|
||||
run: |
|
||||
VERSION=$(jq -r '.version' package.json)
|
||||
echo "::set-output name=version::$VERSION"
|
||||
- name: Check for changes in package.json
|
||||
run: |
|
||||
git diff --cached --diff-filter=d package.json || {
|
||||
echo "No changes to package.json"
|
||||
exit 1
|
||||
}
|
||||
|
||||
- name: Extract latest CHANGELOG entry
|
||||
id: changelog
|
||||
run: |
|
||||
CHANGELOG_CONTENT=$(awk 'BEGIN {print_section=0;} /^## \[/ {if (print_section == 0) {print_section=1;} else {exit;}} print_section {print;}' CHANGELOG.md)
|
||||
CHANGELOG_ESCAPED=$(echo "$CHANGELOG_CONTENT" | sed ':a;N;$!ba;s/\n/%0A/g')
|
||||
echo "Extracted latest release notes from CHANGELOG.md:"
|
||||
echo -e "$CHANGELOG_CONTENT"
|
||||
echo "::set-output name=content::$CHANGELOG_ESCAPED"
|
||||
- name: Get version number from package.json
|
||||
id: get_version
|
||||
run: |
|
||||
VERSION=$(jq -r '.version' package.json)
|
||||
echo "::set-output name=version::$VERSION"
|
||||
|
||||
- name: Create GitHub release
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const changelog = `${{ steps.changelog.outputs.content }}`;
|
||||
const release = await github.rest.repos.createRelease({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
tag_name: `v${{ steps.get_version.outputs.version }}`,
|
||||
name: `v${{ steps.get_version.outputs.version }}`,
|
||||
body: changelog,
|
||||
})
|
||||
console.log(`Created release ${release.data.html_url}`)
|
||||
- name: Extract latest CHANGELOG entry
|
||||
id: changelog
|
||||
run: |
|
||||
CHANGELOG_CONTENT=$(awk 'BEGIN {print_section=0;} /^## \[/ {if (print_section == 0) {print_section=1;} else {exit;}} print_section {print;}' CHANGELOG.md)
|
||||
CHANGELOG_ESCAPED=$(echo "$CHANGELOG_CONTENT" | sed ':a;N;$!ba;s/\n/%0A/g')
|
||||
echo "Extracted latest release notes from CHANGELOG.md:"
|
||||
echo -e "$CHANGELOG_CONTENT"
|
||||
echo "::set-output name=content::$CHANGELOG_ESCAPED"
|
||||
|
||||
- name: Upload package to GitHub release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: package
|
||||
path: .
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Create GitHub release
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const changelog = `${{ steps.changelog.outputs.content }}`;
|
||||
const release = await github.rest.repos.createRelease({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
tag_name: `v${{ steps.get_version.outputs.version }}`,
|
||||
name: `v${{ steps.get_version.outputs.version }}`,
|
||||
body: changelog,
|
||||
})
|
||||
console.log(`Created release ${release.data.html_url}`)
|
||||
|
||||
- name: Trigger Docker build workflow
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
github.rest.actions.createWorkflowDispatch({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
workflow_id: 'docker-build.yaml',
|
||||
ref: 'v${{ steps.get_version.outputs.version }}',
|
||||
})
|
||||
- name: Upload package to GitHub release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: package
|
||||
path: |
|
||||
.
|
||||
!.git
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Trigger Docker build workflow
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
github.rest.actions.createWorkflowDispatch({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
workflow_id: 'docker-build.yaml',
|
||||
ref: 'v${{ steps.get_version.outputs.version }}',
|
||||
})
|
||||
|
||||
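The least obvious command in this hunk is the changelog extraction. Below is a minimal standalone sketch of the same awk/sed pipeline, runnable against any Keep a Changelog style file; the `latest.md` scratch file is purely illustrative.

```bash
# Print only the newest "## [x.y.z]" section: start printing at the first
# "## [" heading and stop as soon as the second one is reached.
awk 'BEGIN {print_section=0;}
     /^## \[/ {if (print_section == 0) {print_section=1;} else {exit;}}
     print_section {print;}' CHANGELOG.md > latest.md

# Join the section into a single line, encoding newlines as %0A so the
# value can be carried through a single-line workflow output.
sed ':a;N;$!ba;s/\n/%0A/g' latest.md
```

Note that `::set-output` has since been deprecated by GitHub Actions; a modernized workflow would append `content=$CHANGELOG_ESCAPED` to the file named by `$GITHUB_OUTPUT` instead.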
**`.github/workflows/docker-build.yaml`** (vendored; 7 lines changed)

```diff
@@ -312,7 +312,7 @@ jobs:

   merge-main-images:
     runs-on: ubuntu-latest
-    needs: [ build-main-image ]
+    needs: [build-main-image]
     steps:
       # GitHub Packages requires the entire repository name to be in lowercase
       # although the repository owner has a lowercase username, this prevents some people from running actions after forking
@@ -364,10 +364,9 @@ jobs:
       run: |
         docker buildx imagetools inspect ${{ env.FULL_IMAGE_NAME }}:${{ steps.meta.outputs.version }}

-
   merge-cuda-images:
     runs-on: ubuntu-latest
-    needs: [ build-cuda-image ]
+    needs: [build-cuda-image]
     steps:
       # GitHub Packages requires the entire repository name to be in lowercase
       # although the repository owner has a lowercase username, this prevents some people from running actions after forking
@@ -423,7 +422,7 @@ jobs:

   merge-ollama-images:
     runs-on: ubuntu-latest
-    needs: [ build-ollama-image ]
+    needs: [build-ollama-image]
     steps:
       # GitHub Packages requires the entire repository name to be in lowercase
       # although the repository owner has a lowercase username, this prevents some people from running actions after forking
```
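Each `merge-*-images` job appears to combine the per-platform builds into a single multi-arch tag, and the `docker buildx imagetools inspect` step is the sanity check for that. A hedged standalone example (the image reference below is illustrative, not taken from the workflow):

```bash
# Show the manifest list behind a tag; a correctly merged multi-arch image
# should report one manifest entry per platform (e.g. linux/amd64, linux/arm64).
docker buildx imagetools inspect ghcr.io/open-webui/open-webui:main
```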
**`.github/workflows/format-backend.yaml`** (vendored; 2 lines changed)

```diff
@@ -23,7 +23,7 @@ jobs:
       - uses: actions/checkout@v4

       - name: Set up Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
```
**`.github/workflows/format-build-frontend.yaml`** (vendored; 4 lines changed)

```diff
@@ -21,7 +21,7 @@ jobs:
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '20' # Or specify any other version you want to use
+          node-version: '22' # Or specify any other version you want to use

       - name: Install Dependencies
         run: npm install
@@ -48,7 +48,7 @@ jobs:
       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '20'
+          node-version: '22'

       - name: Install Dependencies
         run: npm ci
```
**`.github/workflows/integration-test.yml`** (vendored; 11 lines changed)

```diff
@@ -85,7 +85,7 @@ jobs:
       # - uses: actions/checkout@v4

       # - name: Set up Python
-      #   uses: actions/setup-python@v4
+      #   uses: actions/setup-python@v5
       #   with:
       #     python-version: ${{ matrix.python-version }}
@@ -157,7 +157,7 @@ jobs:
           GLOBAL_LOG_LEVEL: debug
         run: |
           cd backend
-          uvicorn main:app --port "8080" --forwarded-allow-ips '*' &
+          uvicorn open_webui.main:app --port "8080" --forwarded-allow-ips '*' &
           UVICORN_PID=$!
           # Wait up to 40 seconds for the server to start
           for i in {1..40}; do
@@ -182,9 +182,12 @@ jobs:
           WEBUI_SECRET_KEY: secret-key
           GLOBAL_LOG_LEVEL: debug
           DATABASE_URL: postgresql://postgres:postgres@localhost:5432/postgres
+          DATABASE_POOL_SIZE: 10
+          DATABASE_POOL_MAX_OVERFLOW: 10
+          DATABASE_POOL_TIMEOUT: 30
         run: |
           cd backend
-          uvicorn main:app --port "8081" --forwarded-allow-ips '*' &
+          uvicorn open_webui.main:app --port "8081" --forwarded-allow-ips '*' &
           UVICORN_PID=$!
           # Wait up to 20 seconds for the server to start
           for i in {1..20}; do
@@ -230,7 +233,7 @@ jobs:
       # DATABASE_URL: mysql://root:mysql@localhost:3306/mysql
       # run: |
       #   cd backend
-      #   uvicorn main:app --port "8083" --forwarded-allow-ips '*' &
+      #   uvicorn open_webui.main:app --port "8083" --forwarded-allow-ips '*' &
       #   UVICORN_PID=$!
       #   # Wait up to 20 seconds for the server to start
       #   # for i in {1..20}; do
```
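The `main:app` to `open_webui.main:app` change reflects the backend now being importable as the `open_webui` package. A minimal local sketch of the start-and-poll pattern the workflow uses, with the health-check URL as an assumption (the actual probe is not shown in this hunk):

```bash
# Start the backend in the background, then poll until it answers or we
# give up, mirroring the "Wait up to 40 seconds" loop in the workflow.
cd backend
uvicorn open_webui.main:app --port "8080" --forwarded-allow-ips '*' &
UVICORN_PID=$!

for i in {1..40}; do
  if curl -sf http://localhost:8080/health > /dev/null; then
    echo "Server came up after ${i}s (pid $UVICORN_PID)"
    break
  fi
  sleep 1
done
```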
**`.github/workflows/lint-backend.disabled`** (vendored; 2 lines changed)

```diff
@@ -16,7 +16,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Use Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
       - name: Use Bun
         uses: oven-sh/setup-bun@v1
       - name: Install dependencies
```
**`.github/workflows/release-pypi.yml`** (vendored; 1 line changed)

```diff
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - main # or whatever branch you want to use
+      - pypi-release

 jobs:
   release:
```
**`CHANGELOG.md`** (368 lines added)

The hunk (`@@ -5,6 +5,374 @@`) inserts the following release notes beneath the file's standing preamble ("The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)."):
## [0.3.33] - 2024-10-24

### Added

- **🏆 Evaluation Leaderboard**: Easily track your performance through a new leaderboard system where your ratings contribute to a real-time ranking based on the Elo system. Sibling responses (regenerations, many model chats) are required for your ratings to count in the leaderboard. Additionally, you can opt-in to share your feedback history and be part of the community-wide leaderboard. Expect further improvements as we refine the algorithm—help us build the best community leaderboard!
- **⚔️ Arena Model Evaluation**: Enable blind A/B testing of models directly from Admin Settings > Evaluation for a true side-by-side comparison. Ideal for pinpointing the best model for your needs.
- **🎯 Topic-Based Leaderboard**: Discover more accurate rankings with experimental topic-based reranking, which adjusts leaderboard standings based on tag similarity in feedback. Get more relevant insights based on specific topics!
- **📁 Folders Support for Chats**: Organize your chats better by grouping them into folders. Drag and drop chats between folders and export them seamlessly for easy sharing or analysis.
- **📤 Easy Chat Import via Drag & Drop**: Save time by simply dragging and dropping chat exports (JSON) directly onto the sidebar to import them into your workspace—streamlined, efficient, and intuitive!
- **📚 Enhanced Knowledge Collection**: Now, you can reference individual files from a knowledge collection—ideal for more precise Retrieval-Augmented Generations (RAG) queries and document analysis.
- **🏷️ Enhanced Tagging System**: Tags now take up less space! Utilize the new 'tag:' query system to manage, search, and organize your conversations more effectively without cluttering the interface.
- **🧠 Auto-Tagging for Chats**: Your conversations are now automatically tagged for improved organization, mirroring the efficiency of auto-generated titles.
- **🔍 Backend Chat Query System**: Chat filtering has become more efficient, now handled through the backend instead of your browser, improving search performance and accuracy.
- **🎮 Revamped Playground**: Experience a refreshed and optimized Playground for smoother testing, tweaks, and experimentation of your models and tools.
- **🧩 Token-Based Text Splitter**: Introducing token-based text splitting (tiktoken), giving you more precise control over how text is processed. Previously, only character-based splitting was available.
- **🔢 Ollama Batch Embeddings**: Leverage new batch embedding support for improved efficiency and performance with Ollama embedding models.
- **🔍 Enhanced Add Text Content Modal**: Enjoy a cleaner, more intuitive workflow for adding and curating knowledge content with an upgraded input modal from our Knowledge workspace.
- **🖋️ Rich Text Input for Chats**: Make your chat inputs more dynamic with support for rich text formatting. Your conversations just got a lot more polished and professional.
- **⚡ Faster Whisper Model Configurability**: Customize your local faster whisper model directly from the WebUI.
- **☁️ Experimental S3 Support**: Enable stateless WebUI instances with S3 support, greatly enhancing scalability and balancing heavy workloads.
- **🔕 Disable Update Toast**: Now you can streamline your workspace even further—choose to disable update notifications for a more focused experience.
- **🌟 RAG Citation Relevance Percentage**: Easily assess citation accuracy with the addition of relevance percentages in RAG results.
- **⚙️ Mermaid Copy Button**: Mermaid diagrams now come with a handy copy button, simplifying the extraction and use of diagram contents directly in your workflow.
- **🎨 UI Redesign**: Major interface redesign that will make navigation smoother, keep your focus where it matters, and ensure a modern look.

### Fixed

- **🎙️ Voice Note Mic Stopping Issue**: Fixed the issue where the microphone stayed active after ending a voice note recording, ensuring your audio workflow runs smoothly.

### Removed

- **👋 Goodbye Sidebar Tags**: Sidebar tag clutter is gone. We've shifted tag buttons to more effective query-based tag filtering for a sleeker, more agile interface.
## [0.3.32] - 2024-10-06

### Added

- **🔢 Workspace Enhancements**: Added a display count for models, prompts, tools, and functions in the workspace, providing a clear overview and easier management.

### Fixed

- **🖥️ Web and YouTube Attachment Fix**: Resolved an issue where attaching web links and YouTube videos was malfunctioning, ensuring seamless integration and display within chats.
- **📞 Call Mode Activation on Landing Page**: Fixed a bug where call mode was not operational from the landing page.

### Changed

- **🔄 URL Parameter Refinement**: Updated the 'tool_ids' URL parameter to 'tools' or 'tool-ids' for a more intuitive and consistent user experience.
- **🎨 Floating Buttons Styling Update**: Refactored the styling of floating buttons to intelligently adjust to the left side when there isn't enough room on the right, improving interface usability and aesthetics.
- **🔧 Enhanced Accessibility for Floating Buttons**: Implemented the ability to close floating buttons with the 'Esc' key, making workflow smoother and more efficient for users navigating via keyboard.
- **🖇️ Updated Information URL**: Information URLs now direct users to a general release page rather than a version-specific URL, ensuring access to the latest and relevant details all in one place.
- **📦 Library Dependencies Update**: Upgraded dependencies to ensure compatibility and performance optimization for pip installs.
## [0.3.31] - 2024-10-06

### Added

- **📚 Knowledge Feature**: Reimagined documents feature, now more performant with a better UI for enhanced organization; includes streamlined API integration for Retrieval-Augmented Generation (RAG). Detailed documentation forthcoming: https://docs.openwebui.com/
- **🌐 New Landing Page**: Freshly designed landing page; toggle between the new UI and the classic chat UI from Settings > Interface for a personalized experience.
- **📁 Full Document Retrieval Mode**: Toggle between full document retrieval or traditional snippets by clicking on the file item. This mode enhances document capabilities and supports comprehensive tasks like summarization by utilizing the entire content instead of RAG.
- **📄 Extracted File Content Display**: View extracted content directly by clicking on the file item, simplifying file analysis.
- **🎨 Artifacts Feature**: Render web content and SVGs directly in the interface, supporting quick iterations and live changes.
- **🖊️ Editable Code Blocks**: Supercharged code blocks now allow live editing directly in the LLM response, with live reloads supported by artifacts.
- **🔧 Code Block Enhancements**: Introduced a floating copy button in code blocks to facilitate easier code copying without scrolling.
- **🔍 SVG Pan/Zoom**: Enhanced interaction with SVG images, including Mermaid diagrams, via new pan and zoom capabilities.
- **🔍 Text Select Quick Actions**: New floating buttons appear when text is highlighted in LLM responses, offering deeper interactions like "Ask a Question" or "Explain".
- **🗃️ Database Pool Configuration**: Enhanced database handling to support scalable user growth.
- **🔊 Experimental Audio Compression**: Compress audio files to navigate around the 25MB limit for OpenAI's speech-to-text processing.
- **🔍 Query Embedding**: Adjusted embedding behavior to enhance system performance by not repeating query embedding.
- **💾 Lazy Load Optimizations**: Implemented lazy loading of large dependencies to minimize initial memory usage, boosting performance.
- **🍏 Apple Touch Icon Support**: Optimizes the display of icons for web bookmarks on Apple mobile devices.
- **🔽 Expandable Content Markdown Support**: Introducing 'details', 'summary' tag support for creating expandable content sections in markdown, facilitating cleaner, organized documentation and interactive content display.

### Fixed

- **🔘 Action Button Issue**: Resolved a bug where action buttons were not functioning, enhancing UI reliability.
- **🔄 Multi-Model Chat Loop**: Fixed an infinite loop issue in multi-model chat environments, ensuring smoother chat operations.
- **📄 Chat PDF/TXT Export Issue**: Resolved problems with exporting chat logs to PDF and TXT formats.
- **🔊 Call to Text-to-Speech Issues**: Rectified problems with text-to-speech functions to improve audio interactions.

### Changed

- **⚙️ Endpoint Renaming**: Renamed 'rag' endpoints to 'retrieval' for clearer function description.
- **🎨 Styling and Interface Updates**: Multiple refinements across the platform to enhance visual appeal and user interaction.

### Removed

- **🗑️ Deprecated 'DOCS_DIR'**: Removed the outdated 'docs_dir' variable in favor of more direct file management solutions, with direct file directory syncing and API uploads for a more integrated experience.
## [0.3.30] - 2024-09-26

### Fixed

- **🍞 Update Available Toast Dismissal**: Enhanced user experience by ensuring that once the update available notification is dismissed, it won't reappear for 24 hours.
- **📋 Ollama /embed Form Data**: Adjusted the integration inaccuracies in the /embed form data to ensure it perfectly matches with Ollama's specifications.
- **🔧 O1 Max Completion Tokens Issue**: Resolved compatibility issues with OpenAI's o1 models max_completion_tokens param to ensure smooth operation.
- **🔄 Pip Install Database Issue**: Fixed a critical issue where database changes during pip installations were reverting and not saving chat logs, now ensuring data persistence and reliability in chat operations.
- **🏷️ Chat Rename Tab Update**: Fixed the functionality to change the web browser's tab title simultaneously when a chat is renamed, keeping tab titles consistent.
## [0.3.29] - 2024-09-25

### Fixed

- **🔧 KaTeX Rendering Improvement**: Resolved specific corner cases in KaTeX rendering to enhance the display of complex mathematical notation.
- **📞 'Call' URL Parameter Fix**: Corrected functionality for the 'call' URL search parameter, ensuring reliable activation of voice calls through URL triggers.
- **🔄 Configuration Reset Fix**: Fixed the RESET_CONFIG_ON_START to ensure settings revert to default correctly upon each startup, improving reliability in configuration management.
- **🌍 Filter Outlet Hook Fix**: Addressed issues in the filter outlet hook, ensuring all filter functions operate as intended.
## [0.3.28] - 2024-09-24

### Fixed

- **🔍 Web Search Functionality**: Corrected an issue where the web search option was not functioning properly.

## [0.3.27] - 2024-09-24

### Fixed

- **🔄 Periodic Cleanup Error Resolved**: Fixed a critical RuntimeError related to the 'periodic_usage_pool_cleanup' coroutine, ensuring smooth and efficient performance post-pip install, correcting a persisting issue from version 0.3.26.
- **📊 Enhanced LaTeX Rendering**: Improved rendering for LaTeX content, enhancing clarity and visual presentation in documents and mathematical models.
## [0.3.26] - 2024-09-24

### Fixed

- **🔄 Event Loop Error Resolution**: Addressed a critical error where a missing running event loop caused 'periodic_usage_pool_cleanup' to fail with pip installs. This fix ensures smoother and more reliable updates and installations, enhancing overall system stability.

## [0.3.25] - 2024-09-24

### Fixed

- **🖼️ Image Generation Functionality**: Resolved an issue where image generation was not functioning, restoring full capability for visual content creation.
- **⚖️ Rate Response Corrections**: Addressed a problem where rate responses were not working, ensuring reliable feedback mechanisms are operational.
## [0.3.24] - 2024-09-24

### Added

- **🚀 Rendering Optimization**: Significantly improved message rendering performance, enhancing user experience and WebUI responsiveness.
- **💖 Favorite Response Feature in Chat Overview**: Users can now mark responses as favorite directly from the chat overview, making preferred responses easier to retrieve and organize.
- **💬 Create Message Pairs with Shortcut**: New message pairs can now be created with Cmd/Ctrl+Shift+Enter, making conversation editing faster and more intuitive.
- **🌍 Expanded User Prompt Variables**: Added weekday, timezone, and language information variables to user prompts to match the system prompt variables.
- **🎵 Enhanced Audio Support**: Added support for 'audio/x-m4a' files, broadening compatibility with audio content within the platform.
- **🔏 Model URL Search Parameter**: Added the ability to select a model directly via URL parameters, streamlining navigation and model access.
- **📄 Enhanced PDF Citations**: PDF citations now open at the associated page, streamlining reference checks and document handling.
- **🔧 Use of Redis in Sockets**: Enhanced the socket implementation to fully support Redis, enabling stateless instances suitable for scalable load balancing (see the example after this list).
- **🌍 Stream Individual Model Responses**: Specific models can now have individualized streaming settings, enhancing performance and customization.
- **🕒 Display Model Hash and Last Modified Timestamp for Ollama Models**: Provides critical model details directly in the Models workspace for enhanced tracking.
- **❗ Update Info Notification for Admins**: Administrators now receive immediate update notifications upon login, keeping them informed of the latest changes and system status.

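A minimal sketch of a Redis-backed, multi-instance setup (the variable names below are assumptions drawn from the websocket/Redis support this release describes; verify them against the documentation for your version):

```bash
# Hypothetical multi-instance deployment sharing one Redis server for socket state
# (ENABLE_WEBSOCKET_SUPPORT / WEBSOCKET_MANAGER / WEBSOCKET_REDIS_URL are assumed names)
docker run -d -p 3000:8080 \
  -e ENABLE_WEBSOCKET_SUPPORT=true \
  -e WEBSOCKET_MANAGER=redis \
  -e WEBSOCKET_REDIS_URL=redis://redis-host:6379/0 \
  -v open-webui:/app/backend/data \
  --name open-webui-1 ghcr.io/open-webui/open-webui:main
```

Running additional containers with the same Redis URL behind a load balancer keeps session state consistent across instances.
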
### Fixed

- **🗑️ Temporary File Handling on Windows**: Fixed an issue causing errors when accessing a temporary file that was in use by another process; Tools & Functions should now work as intended.
- **🔓 Authentication Toggle Issue**: Resolved a malfunction where setting 'WEBUI_AUTH=False' did not properly disable authentication, ensuring that user experience and system security settings function as configured.
- **🔧 Save As Copy Issue for Many-Model Chats**: Resolved an error that prevented users from saving messages as copies in many-model chats.
- **🔒 Sidebar Closure on Mobile**: Resolved an issue where the mobile sidebar remained open after menu engagement, improving interface responsiveness and comfort.
- **🛡️ Tooltip XSS Vulnerability**: Resolved a cross-site scripting (XSS) issue within tooltips, ensuring enhanced security and data integrity during user interactions.

### Changed

- **↩️ Deprecated Interface Stream Response Settings**: Moved stream response settings to advanced parameters to streamline interface settings and improve clarity.
- **⚙️ Renamed 'speedRate' to 'playbackRate'**: Standardizes terminology, improving usability and understanding in media settings.

## [0.3.23] - 2024-09-21

### Added

- **🚀 WebSocket Redis Support**: Enhanced load-balancing capabilities for multi-instance setups, promoting better performance and reliability in the WebUI.
- **🔧 Adjustable Chat Controls**: Introduced width-adjustable chat controls, enabling a personalized and more comfortable user interface.
- **🌎 i18n Updates**: Improved and updated the Chinese translations.

### Fixed

- **🌐 Task Model Unloading Issue**: Modified task handling to use the Ollama /api/chat endpoint instead of the OpenAI-compatible endpoint, ensuring models stay loaded with custom parameters and minimizing delays in task execution.
- **📝 Title Generation Fix for OpenAI-Compatible APIs**: Resolved an issue preventing title generation, improving consistency and reliability when using multiple API providers.
- **🗃️ RAG Duplicate Collection Issue**: Fixed a bug causing repeated processing of the same uploaded file. Indexed files are now reused to prevent unnecessary duplication, optimizing resource usage.
- **🖼️ Image Generation Enhancement**: Refactored the OpenAI image generation endpoint to be asynchronous, preventing the WebUI from becoming unresponsive during processing.
- **🔓 Downgrade Authlib**: Reverted Authlib to version 1.3.1 to resolve issues with OAuth functionality.

### Changed

- **🔍 Improved Message Interaction**: Enhanced the message node interface to allow focus redirection with a single click, streamlining user interaction.
- **✨ Styling Refactor**: Updated WebUI styling for a cleaner, more modern look across the platform.

## [0.3.22] - 2024-09-19

### Added

- **⭐ Chat Overview**: Introduced a node-based interactive message diagram for improved visualization of conversation flows.
- **🔗 Multiple Vector DB Support**: Now supports multiple vector databases, including the newly added Milvus support. Community contributions for additional database support are highly encouraged!
- **📡 Experimental Non-Stream Chat Completion**: Experimental feature allowing the use of OpenAI o1 models, which do not support streaming, enabling more versatile model deployment.
- **🔍 Experimental ColBERT-AI Reranker Integration**: Added support for "jinaai/jina-colbert-v2" as a reranker, enhancing search relevance and accuracy. Note: it may not function at all on low-spec computers.
- **🕸️ ENABLE_WEBSOCKET_SUPPORT**: Added an environment variable that lets instances ignore websocket upgrades, stabilizing connections on platforms with websocket issues (see the example after this list).
- **🔊 Azure Speech Service Integration**: Added support for Azure Speech services for Text-to-Speech (TTS).
- **🎚️ Customizable Playback Speed**: Playback speed control is now available in Call mode settings, allowing users to adjust audio playback speed to their preference.
- **🧠 Enhanced Error Messaging**: The system now displays helpful error messages directly to users during chat completion issues.
- **📂 Save Model as Transparent PNG**: Model profile images are now saved as PNGs, supporting transparency and improving visual integration.
- **📱 iPhone Compatibility Adjustments**: Added padding to accommodate the iPhone navigation bar, improving UI display on these devices.
- **🔗 Secure Response Headers**: Implemented security response headers, bolstering web application security.
- **🔧 Enhanced AUTOMATIC1111 Settings**: Users can now configure 'CFG Scale', 'Sampler', and 'Scheduler' parameters directly in the admin settings, enhancing workflow flexibility without source code modifications.
- **🌍 i18n Updates**: Enhanced translations for Chinese, Ukrainian, Russian, and French, fostering a better localized experience.

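If websocket upgrades misbehave behind a given proxy, the variable can be switched off so clients fall back to long-polling (a sketch; treat the exact name and default as assumptions to verify):

```bash
# Disable websocket upgrades for this instance (polling-only transport)
docker run -d -p 3000:8080 \
  -e ENABLE_WEBSOCKET_SUPPORT=false \
  -v open-webui:/app/backend/data \
  --name open-webui ghcr.io/open-webui/open-webui:main
```
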
### Fixed

- **🛠️ Chat Message Deletion**: Resolved issues with chat message deletion, ensuring smoother user interaction and system stability.
- **🔢 Ordered List Numbering**: Fixed incorrect numbering in ordered lists.

### Changed

- **🎨 Transparent Icon Handling**: Model icons can now be displayed on transparent backgrounds, improving UI aesthetics.
- **📝 Improved RAG Template**: Enhanced the Retrieval-Augmented Generation template, optimizing context handling and error checking for more precise operation.

## [0.3.21] - 2024-09-08

### Added

- **📊 Document Count Display**: The total number of documents is now displayed directly within the dashboard.
- **🚀 Ollama Embed API Endpoint**: Enabled proxy support for the /api/embed endpoint.

### Fixed

- **🐳 Docker Launch Issue**: Resolved a problem preventing Open WebUI from launching correctly when using Docker.

### Changed

- **🔍 Enhanced Search Prompts**: Improved the search query generation prompts for better accuracy and user interaction, enhancing the overall search experience.

## [0.3.20] - 2024-09-07

### Added

- **🌐 Translation Update**: Updated Catalan translations to improve the experience for Catalan speakers.

### Fixed

- **📄 PDF Download**: Resolved a fonts-directory configuration issue, ensuring PDFs download with the correct formatting.
- **🛠️ Installation of Tools & Functions Requirements**: Fixed a bug where the requirements for tools and functions were not being installed properly.
- **🔗 Inline Image Link Rendering**: Enabled rendering of images directly from links in chat.
- **📞 Post-Call User Interface Cleanup**: Chat controls now close automatically after a voice call ends, reducing screen clutter.
- **🎙️ Microphone Deactivation Post-Call**: Addressed an issue where the microphone remained active after calls.
- **✍️ Markdown Spacing Correction**: Corrected spacing in Markdown rendering so text appears neatly and as expected.
- **🔄 Message Re-rendering**: Fixed an issue causing all response messages to re-render with each new message, improving chat performance.

### Changed

- **🌐 Refined Web Search Integration**: Deprecated the Search Query Generation Prompt threshold; introduced an "Enable Web Search Query Generation" toggle so users can opt in to web search more judiciously.
- **📝 Default Prompt Templates Update**: Empty environment-variable templates for search and title generation now fall back to the Open WebUI default prompt templates, simplifying configuration.

## [0.3.19] - 2024-09-05

### Added

- **🌐 Translation Update**: Improved Chinese translations.

### Fixed

- **📂 DATA_DIR Overriding**: No longer overrides DATA_DIR, preventing errors when directories are set identically and ensuring smoother operation and data management.
- **🛠️ Frontmatter Extraction**: Fixed the frontmatter extraction process for tools and functions.

### Changed

- **🎨 UI Styling**: Refined the user interface styling for enhanced visual coherence and user experience.

## [0.3.18] - 2024-09-04

### Added

- **🛠️ Direct Database Execution for Tools & Functions**: Python files for tools and functions are now loaded directly from the database, streamlining the backend process.

### Fixed

- **🔄 Automatic Rewrite of Import Statements in Tools & Functions**: Tool and function scripts that import 'utils', 'apps', 'main', or 'config' now have these automatically prefixed with 'open_webui.', ensuring compatibility and consistency across modules.
- **🎨 Styling Adjustments**: Minor visual styling fixes to improve user experience and interface consistency.

## [0.3.17] - 2024-09-04

### Added

- **🔄 Import/Export Configuration**: Users can now import and export WebUI configurations from Admin Settings > Database, simplifying setup replication across systems.
- **🌍 Web Search via URL Parameter**: Added support for activating web search directly through the URL by setting 'web-search=true' (see the example after this list).
- **🌐 SearchApi Integration**: Added support for SearchApi as an alternative web search provider, enhancing search capabilities within the platform.
- **🔍 Literal Type Support in Tools**: Tools now support the Literal type.
- **🌍 Updated Translations**: Improved translations for Chinese, Ukrainian, and Catalan.

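For example (hostname and port are placeholders for your own deployment):

```bash
# Open a new chat with web search already enabled (example URL)
xdg-open "http://localhost:3000/?web-search=true"
```
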
### Fixed

- **🔧 Pip Install Issue**: Resolved a failure during pip install caused by a missing 'alembic.ini', ensuring smoother installation.
- **🌃 Automatic Theme Update**: Fixed an issue where the color theme did not update dynamically with system changes.
- **🛠️ User Agent in ComfyUI**: Added default headers in ComfyUI requests to fix access issues, improving reliability in network communications.
- **🔄 Missing Chat Completion Response Headers**: Ensured proxied response headers are properly returned during chat completion, improving API reliability.
- **🔗 Websocket Connection Prioritization**: Modified the socket.io configuration to prefer websockets and fall back to polling more reliably, enhancing connection stability.
- **🎭 Accessibility Enhancements**: Added missing ARIA labels for buttons, improving accessibility for visually impaired users.
- **⚖️ Advanced Parameters**: Ensured advanced parameters are correctly applied in all scenarios, so user-defined settings behave consistently.

### Changed

- **🔁 Namespace Reorganization**: Reorganized all Python files under the 'open_webui' namespace to streamline the project structure and improve maintainability. Tools and functions importing from 'utils' should now use 'open_webui.utils'.
- **🚧 Dependency Updates**: Updated several backend dependencies, including 'aiohttp', 'authlib', 'duckduckgo-search', 'flask-cors', and 'langchain', to their latest versions for performance and security.

## [0.3.16] - 2024-08-27

### Added

- **🚀 Config DB Migration**: Migrated configuration handling from config.json to the database, enabling high-availability setups and load balancing across multiple Open WebUI instances.
- **🔗 Call Mode Activation via URL**: Added a 'call=true' URL search parameter, enabling direct shortcuts to activate call mode on mobile devices (see the example after this list).
- **✨ TTS Content Control**: Added functionality to control how message content is segmented for Text-to-Speech (TTS) generation requests, allowing more flexible speech output options.
- **😄 Show Knowledge Search Status**: Enhanced model usage transparency by displaying a status indicator when working with knowledge-augmented models, helping users understand the system's state during queries.
- **👆 Click-to-Copy for Codespans**: Users can now click to copy content directly from code spans, enhancing the interactive experience in the WebUI.
- **🚫 API User Blocking via Model Filter**: Introduced the ability to block API users based on customized model filters, enhancing security and control over API access.
- **🎬 Call Overlay Styling**: On large screens, the call overlay now covers only the chat control area rather than the entire interface, for a less obtrusive interaction experience.

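For example, combined with the 'model' parameter from 0.3.24 (placeholder host and model name):

```bash
# Deep link that selects a model and starts in call mode (example URL)
xdg-open "http://localhost:3000/?model=llama3.1&call=true"
```
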
### Fixed

- **🔧 LaTeX Rendering Issue**: Addressed an issue affecting the correct rendering of LaTeX.
- **📁 File Leak Prevention**: Resolved an issue where uploaded files were mistakenly accessible across user chats.
- **🔧 Pipe Functions with `__files__` Param**: Fixed the `__files__` parameter not functioning correctly in pipe functions.
- **📝 Markdown Processing for RAG**: Fixed issues with processing Markdown in files.
- **🚫 Duplicate System Prompts**: Fixed bugs causing system prompts to duplicate.

### Changed

- **🔋 Wakelock Permission**: Wakelock now activates only during call mode, conserving device resources and improving battery performance during idle periods.
- **🔍 Content-Type for Ollama Chats**: Added the 'application/x-ndjson' content type to '/api/chat' endpoint responses to match raw Ollama responses.
- **✋ Disable Signups Conditionally**: Sign-ups are now disabled when 'ENABLE_LOGIN_FORM' is set to false.

## [0.3.15] - 2024-08-21

### Added

- **🔗 Temporary Chat Activation**: Added a 'temporary-chat=true' URL parameter to enable temporary chat sessions directly through the URL.
- **🌄 ComfyUI Seed Node Support**: Introduced seed node support in ComfyUI for image generation, allowing users to specify node IDs for randomized seed assignment.

### Fixed

- **🛠️ Tools and Functions**: Resolved a critical issue where Tools and Functions were not working properly, restoring full capability and reliability to these essential features.
- **🔘 Chat Action Button in Many-Model Chat**: Fixed malfunctioning chat action buttons in many-model chat environments, ensuring smoother and more responsive interaction.
- **⏪ Many-Model Chat Compatibility**: Restored backward compatibility for many-model chats.

## [0.3.14] - 2024-08-21

### Added

- **🛠️ Custom ComfyUI Workflow**: Introduced a new, customizable image-generation workflow, deprecating several older environment variables in the process.
- **🔀 Merge Responses in Many-Model Chat**: Responses from multiple models can now be merged into a single, coherent reply, improving interaction quality in many-model chats.
- **✅ Multiple Instances of the Same Model in Chats**: Many-model chat now supports adding multiple instances of the same model.
- **🔧 Quick Actions in Model Workspace**: Enhanced Shift-key quick actions for hiding/unhiding and deleting models, facilitating a smoother workflow.
- **🗨️ Markdown Rendering in User Messages**: User messages are now rendered in Markdown, enhancing readability and interaction.
- **💬 Temporary Chat Feature**: Introduced a temporary chat feature, deprecating the old chat history setting to make user interaction more flexible.
- **🖋️ User Message Editing**: Message edits can now be saved without sending, providing more flexibility in message management.
- **🛡️ Security Enhancements**: Various security improvements implemented across the platform for a safer user experience.
- **🌍 Updated Translations**: Enhanced translations for Chinese, Ukrainian, and Bahasa Malaysia, improving localization and user comprehension.

### Fixed

- **📑 Mermaid Rendering Issue**: Addressed issues with Mermaid chart rendering to ensure clean and clear visual data representation.
- **🎭 PWA Icon Maskability**: Made the Progressive Web App icon maskable, ensuring proper display on various device home screens.
- **🔀 Cloned Model Chat Freezing Issue**: Fixed a bug where cloning many-model chats caused freezing, improving stability and responsiveness.
- **🔍 Generic Error Handling and Refinements**: Various minor fixes and refinements to address previously untracked issues, ensuring smoother operation.

### Changed

- **🖼️ Image Generation Refactor**: Overhauled image generation processes for improved efficiency and quality.
- **🔨 Refactored Tool and Function Calling**: Refactored tool and function calling mechanisms for improved clarity and maintainability.
- **🌐 Backend Library Updates**: Updated critical backend libraries, including SQLAlchemy, uvicorn[standard], faster-whisper, bcrypt, and boto3, for enhanced performance and security.

### Removed

- **🚫 Deprecated ComfyUI Environment Variables**: Removed several outdated environment variables related to ComfyUI settings, simplifying configuration management.

## [0.3.13] - 2024-08-14

### Added

Dockerfile (27 changes)

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1
# Initialize device type args
# use build args in the docker build commmand with --build-arg="BUILDARG=true"
# use build args in the docker build command with --build-arg="BUILDARG=true"
ARG USE_CUDA=false
ARG USE_OLLAMA=false
# Tested with cu117 for CUDA 11 and cu121 for CUDA 12 (default)
@@ -11,13 +11,17 @@ ARG USE_CUDA_VER=cu121
# IMPORTANT: If you change the embedding model (sentence-transformers/all-MiniLM-L6-v2) and vice versa, you aren't able to use RAG Chat with your previous documents loaded in the WebUI! You need to re-embed them.
ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
ARG USE_RERANKING_MODEL=""

# Tiktoken encoding name; models to use can be found at https://huggingface.co/models?library=tiktoken
ARG USE_TIKTOKEN_ENCODING_NAME="cl100k_base"

ARG BUILD_HASH=dev-build
# Override at your own risk - non-root configurations are untested
ARG UID=0
ARG GID=0

######## WebUI frontend ########
FROM --platform=$BUILDPLATFORM node:21-alpine3.19 as build
FROM --platform=$BUILDPLATFORM node:22-alpine3.20 AS build
ARG BUILD_HASH

WORKDIR /app
@@ -30,7 +34,7 @@ ENV APP_BUILD_HASH=${BUILD_HASH}
RUN npm run build

######## WebUI backend ########
FROM python:3.11-slim-bookworm as base
FROM python:3.11-slim-bookworm AS base

# Use args
ARG USE_CUDA
@@ -72,13 +76,21 @@ ENV RAG_EMBEDDING_MODEL="$USE_EMBEDDING_MODEL_DOCKER" \
    RAG_RERANKING_MODEL="$USE_RERANKING_MODEL_DOCKER" \
    SENTENCE_TRANSFORMERS_HOME="/app/backend/data/cache/embedding/models"

## Tiktoken model settings ##
ENV TIKTOKEN_ENCODING_NAME="$USE_TIKTOKEN_ENCODING_NAME" \
    TIKTOKEN_CACHE_DIR="/app/backend/data/cache/tiktoken"

## Hugging Face download cache ##
ENV HF_HOME="/app/backend/data/cache/embedding/models"

## Torch Extensions ##
# ENV TORCH_EXTENSIONS_DIR="/.cache/torch_extensions"

#### Other models ##########################################################

WORKDIR /app/backend

ENV HOME /root
ENV HOME=/root
# Create user and group if not root
RUN if [ $UID -ne 0 ]; then \
    if [ $GID -ne 0 ]; then \
@@ -96,7 +108,7 @@ RUN chown -R $UID:$GID /app $HOME
RUN if [ "$USE_OLLAMA" = "true" ]; then \
    apt-get update && \
    # Install pandoc and netcat
    apt-get install -y --no-install-recommends pandoc netcat-openbsd curl && \
    apt-get install -y --no-install-recommends git build-essential pandoc netcat-openbsd curl && \
    apt-get install -y --no-install-recommends gcc python3-dev && \
    # for RAG OCR
    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
@@ -109,7 +121,7 @@ RUN if [ "$USE_OLLAMA" = "true" ]; then \
    else \
    apt-get update && \
    # Install pandoc, netcat and gcc
    apt-get install -y --no-install-recommends pandoc gcc netcat-openbsd curl jq && \
    apt-get install -y --no-install-recommends git build-essential pandoc gcc netcat-openbsd curl jq && \
    apt-get install -y --no-install-recommends gcc python3-dev && \
    # for RAG OCR
    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
@@ -127,11 +139,13 @@ RUN pip3 install uv && \
    uv pip install --system -r requirements.txt --no-cache-dir && \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \
    python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \
    python -c "import os; import tiktoken; tiktoken.get_encoding(os.environ['TIKTOKEN_ENCODING_NAME'])"; \
    else \
    pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu --no-cache-dir && \
    uv pip install --system -r requirements.txt --no-cache-dir && \
    python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \
    python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \
    python -c "import os; import tiktoken; tiktoken.get_encoding(os.environ['TIKTOKEN_ENCODING_NAME'])"; \
    fi; \
    chown -R $UID:$GID /app/backend/data/

@@ -157,5 +171,6 @@ USER $UID:$GID

ARG BUILD_HASH
ENV WEBUI_BUILD_VERSION=${BUILD_HASH}
ENV DOCKER=true

CMD [ "bash", "start.sh"]

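To exercise the build arguments defined above, a local build along these lines should work (a sketch; the CUDA variant is shown and the image tag is arbitrary):

```bash
# Local image build with CUDA support enabled via the ARGs from the Dockerfile
docker build \
  --build-arg USE_CUDA=true \
  --build-arg USE_CUDA_VER=cu121 \
  -t open-webui:custom-cuda .
```
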
README.md (34 changes)

@@ -1,4 +1,4 @@
# Open WebUI (Formerly Ollama WebUI) 👋
# Open WebUI 👋

@@ -37,7 +37,7 @@ Open WebUI is an [extensible](https://github.com/open-webui/pipelines), feature-

- 📚 **Local RAG Integration**: Dive into the future of chat interactions with groundbreaking Retrieval Augmented Generation (RAG) support. This feature seamlessly integrates document interactions into your chat experience. You can load documents directly into the chat or add files to your document library, effortlessly accessing them using the `#` command before a query.

- 🔍 **Web Search for RAG**: Perform web searches using providers like `SearXNG`, `Google PSE`, `Brave Search`, `serpstack`, `serper`, `Serply`, `DuckDuckGo` and `TavilySearch` and inject the results directly into your chat experience.
- 🔍 **Web Search for RAG**: Perform web searches using providers like `SearXNG`, `Google PSE`, `Brave Search`, `serpstack`, `serper`, `Serply`, `DuckDuckGo`, `TavilySearch` and `SearchApi` and inject the results directly into your chat experience.

- 🌐 **Web Browsing Capability**: Seamlessly integrate websites into your chat experience using the `#` command followed by a URL. This feature allows you to incorporate web content directly into your conversations, enhancing the richness and depth of your interactions.

@@ -59,11 +59,31 @@ Don't forget to explore our sibling project, [Open WebUI Community](https://open

## How to Install 🚀

> [!NOTE]
> Please note that for certain Docker environments, additional configurations might be needed. If you encounter any connection issues, our detailed guide on [Open WebUI Documentation](https://docs.openwebui.com/) is ready to assist you.

### Installation via Python pip 🐍

Open WebUI can be installed using pip, the Python package installer. Before proceeding, ensure you're using **Python 3.11** to avoid compatibility issues.

1. **Install Open WebUI**:
   Open your terminal and run the following command to install Open WebUI:

   ```bash
   pip install open-webui
   ```

2. **Running Open WebUI**:
   After installation, you can start Open WebUI by executing:

   ```bash
   open-webui serve
   ```

This will start the Open WebUI server, which you can access at [http://localhost:8080](http://localhost:8080)

### Quick Start with Docker 🐳

> [!NOTE]
> Please note that for certain Docker environments, additional configurations might be needed. If you encounter any connection issues, our detailed guide on [Open WebUI Documentation](https://docs.openwebui.com/) is ready to assist you.

> [!WARNING]
> When using Docker to install Open WebUI, make sure to include the `-v open-webui:/app/backend/data` in your Docker command. This step is crucial as it ensures your database is properly mounted and prevents any loss of data.

@@ -86,7 +106,7 @@ Don't forget to explore our sibling project, [Open WebUI Community](https://open
  docker run -d -p 3000:8080 -e OLLAMA_BASE_URL=https://example.com -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main
  ```

- **To run Open WebUI with Nvidia GPU support**, use this command:
- **To run Open WebUI with Nvidia GPU support**, use this command:

  ```bash
  docker run -d -p 3000:8080 --gpus all --add-host=host.docker.internal:host-gateway -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:cuda
@@ -150,7 +170,7 @@ docker run --rm --volume /var/run/docker.sock:/var/run/docker.sock containrrr/wa

In the last part of the command, replace `open-webui` with your container name if it is different.

Check our Migration Guide available in our [Open WebUI Documentation](https://docs.openwebui.com/migration/).
Check our Migration Guide available in our [Open WebUI Documentation](https://docs.openwebui.com/tutorials/migration/).

### Using the Dev Branch 🌙

@@ -200,4 +220,4 @@ If you have any questions, suggestions, or need assistance, please open an issue

---

Created by [Timothy J. Baek](https://github.com/tjbck) - Let's make Open WebUI even more amazing together! 💪
Created by [Timothy Jaeryang Baek](https://github.com/tjbck) - Let's make Open WebUI even more amazing together! 💪

@@ -18,7 +18,7 @@ If you're experiencing connection issues, it’s often due to the WebUI docker c
  docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_BASE_URL=http://127.0.0.1:11434 --name open-webui --restart always ghcr.io/open-webui/open-webui:main
  ```

### Error on Slow Reponses for Ollama
### Error on Slow Responses for Ollama

Open WebUI has a default timeout of 5 minutes for Ollama to finish generating the response. If needed, this can be adjusted via the environment variable AIOHTTP_CLIENT_TIMEOUT, which sets the timeout in seconds.

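For instance (a sketch; 600 seconds is an arbitrary choice):

```bash
# Give Ollama up to 10 minutes to finish generating a response
docker run -d -p 3000:8080 \
  -e AIOHTTP_CLIENT_TIMEOUT=600 \
  -v open-webui:/app/backend/data \
  --name open-webui ghcr.io/open-webui/open-webui:main
```
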
backend/.gitignore (vendored, 6 changes)

@@ -8,9 +8,5 @@ _test
Pipfile
!/data
/data/*
!/data/litellm
/data/litellm/*
!data/litellm/config.yaml

!data/config.json
/open_webui/data/*
.webui_secret_key

@@ -1,409 +0,0 @@
|
||||
import asyncio
|
||||
import websocket # NOTE: websocket-client (https://github.com/websocket-client/websocket-client)
|
||||
import json
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import random
|
||||
import logging
|
||||
|
||||
from config import SRC_LOG_LEVELS
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(SRC_LOG_LEVELS["COMFYUI"])
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from typing import Optional
|
||||
|
||||
COMFYUI_DEFAULT_PROMPT = """
|
||||
{
|
||||
"3": {
|
||||
"inputs": {
|
||||
"seed": 0,
|
||||
"steps": 20,
|
||||
"cfg": 8,
|
||||
"sampler_name": "euler",
|
||||
"scheduler": "normal",
|
||||
"denoise": 1,
|
||||
"model": [
|
||||
"4",
|
||||
0
|
||||
],
|
||||
"positive": [
|
||||
"6",
|
||||
0
|
||||
],
|
||||
"negative": [
|
||||
"7",
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"5",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
"_meta": {
|
||||
"title": "KSampler"
|
||||
}
|
||||
},
|
||||
"4": {
|
||||
"inputs": {
|
||||
"ckpt_name": "model.safetensors"
|
||||
},
|
||||
"class_type": "CheckpointLoaderSimple",
|
||||
"_meta": {
|
||||
"title": "Load Checkpoint"
|
||||
}
|
||||
},
|
||||
"5": {
|
||||
"inputs": {
|
||||
"width": 512,
|
||||
"height": 512,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"6": {
|
||||
"inputs": {
|
||||
"text": "Prompt",
|
||||
"clip": [
|
||||
"4",
|
||||
1
|
||||
]
|
||||
},
|
||||
"class_type": "CLIPTextEncode",
|
||||
"_meta": {
|
||||
"title": "CLIP Text Encode (Prompt)"
|
||||
}
|
||||
},
|
||||
"7": {
|
||||
"inputs": {
|
||||
"text": "Negative Prompt",
|
||||
"clip": [
|
||||
"4",
|
||||
1
|
||||
]
|
||||
},
|
||||
"class_type": "CLIPTextEncode",
|
||||
"_meta": {
|
||||
"title": "CLIP Text Encode (Prompt)"
|
||||
}
|
||||
},
|
||||
"8": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"3",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"4",
|
||||
2
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"9": {
|
||||
"inputs": {
|
||||
"filename_prefix": "ComfyUI",
|
||||
"images": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
FLUX_DEFAULT_PROMPT = """
|
||||
{
|
||||
"5": {
|
||||
"inputs": {
|
||||
"width": 1024,
|
||||
"height": 1024,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage"
|
||||
},
|
||||
"6": {
|
||||
"inputs": {
|
||||
"text": "Input Text Here",
|
||||
"clip": [
|
||||
"11",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "CLIPTextEncode"
|
||||
},
|
||||
"8": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"13",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"10",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode"
|
||||
},
|
||||
"9": {
|
||||
"inputs": {
|
||||
"filename_prefix": "ComfyUI",
|
||||
"images": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage"
|
||||
},
|
||||
"10": {
|
||||
"inputs": {
|
||||
"vae_name": "ae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader"
|
||||
},
|
||||
"11": {
|
||||
"inputs": {
|
||||
"clip_name1": "clip_l.safetensors",
|
||||
"clip_name2": "t5xxl_fp16.safetensors",
|
||||
"type": "flux"
|
||||
},
|
||||
"class_type": "DualCLIPLoader"
|
||||
},
|
||||
"12": {
|
||||
"inputs": {
|
||||
"unet_name": "flux1-dev.safetensors",
|
||||
"weight_dtype": "default"
|
||||
},
|
||||
"class_type": "UNETLoader"
|
||||
},
|
||||
"13": {
|
||||
"inputs": {
|
||||
"noise": [
|
||||
"25",
|
||||
0
|
||||
],
|
||||
"guider": [
|
||||
"22",
|
||||
0
|
||||
],
|
||||
"sampler": [
|
||||
"16",
|
||||
0
|
||||
],
|
||||
"sigmas": [
|
||||
"17",
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"5",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SamplerCustomAdvanced"
|
||||
},
|
||||
"16": {
|
||||
"inputs": {
|
||||
"sampler_name": "euler"
|
||||
},
|
||||
"class_type": "KSamplerSelect"
|
||||
},
|
||||
"17": {
|
||||
"inputs": {
|
||||
"scheduler": "simple",
|
||||
"steps": 20,
|
||||
"denoise": 1,
|
||||
"model": [
|
||||
"12",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "BasicScheduler"
|
||||
},
|
||||
"22": {
|
||||
"inputs": {
|
||||
"model": [
|
||||
"12",
|
||||
0
|
||||
],
|
||||
"conditioning": [
|
||||
"6",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "BasicGuider"
|
||||
},
|
||||
"25": {
|
||||
"inputs": {
|
||||
"noise_seed": 778937779713005
|
||||
},
|
||||
"class_type": "RandomNoise"
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def queue_prompt(prompt, client_id, base_url):
|
||||
log.info("queue_prompt")
|
||||
p = {"prompt": prompt, "client_id": client_id}
|
||||
data = json.dumps(p).encode("utf-8")
|
||||
req = urllib.request.Request(f"{base_url}/prompt", data=data)
|
||||
return json.loads(urllib.request.urlopen(req).read())
|
||||
|
||||
|
||||
def get_image(filename, subfolder, folder_type, base_url):
|
||||
log.info("get_image")
|
||||
data = {"filename": filename, "subfolder": subfolder, "type": folder_type}
|
||||
url_values = urllib.parse.urlencode(data)
|
||||
with urllib.request.urlopen(f"{base_url}/view?{url_values}") as response:
|
||||
return response.read()
|
||||
|
||||
|
||||
def get_image_url(filename, subfolder, folder_type, base_url):
|
||||
log.info("get_image")
|
||||
data = {"filename": filename, "subfolder": subfolder, "type": folder_type}
|
||||
url_values = urllib.parse.urlencode(data)
|
||||
return f"{base_url}/view?{url_values}"
|
||||
|
||||
|
||||
def get_history(prompt_id, base_url):
|
||||
log.info("get_history")
|
||||
with urllib.request.urlopen(f"{base_url}/history/{prompt_id}") as response:
|
||||
return json.loads(response.read())
|
||||
|
||||
|
||||
def get_images(ws, prompt, client_id, base_url):
|
||||
prompt_id = queue_prompt(prompt, client_id, base_url)["prompt_id"]
|
||||
output_images = []
|
||||
while True:
|
||||
out = ws.recv()
|
||||
if isinstance(out, str):
|
||||
message = json.loads(out)
|
||||
if message["type"] == "executing":
|
||||
data = message["data"]
|
||||
if data["node"] is None and data["prompt_id"] == prompt_id:
|
||||
break # Execution is done
|
||||
else:
|
||||
continue # previews are binary data
|
||||
|
||||
history = get_history(prompt_id, base_url)[prompt_id]
|
||||
for node_id in history["outputs"]:
|
||||
node_output = history["outputs"][node_id]
|
||||
if "images" in node_output:
|
||||
for image in node_output["images"]:
|
||||
url = get_image_url(
|
||||
image["filename"], image["subfolder"], image["type"], base_url
|
||||
)
|
||||
output_images.append({"url": url})
|
||||
return {"data": output_images}
|
||||
|
||||
|
||||
class ImageGenerationPayload(BaseModel):
|
||||
prompt: str
|
||||
negative_prompt: Optional[str] = ""
|
||||
steps: Optional[int] = None
|
||||
seed: Optional[int] = None
|
||||
width: int
|
||||
height: int
|
||||
n: int = 1
|
||||
cfg_scale: Optional[float] = None
|
||||
sampler: Optional[str] = None
|
||||
scheduler: Optional[str] = None
|
||||
sd3: Optional[bool] = None
|
||||
flux: Optional[bool] = None
|
||||
flux_weight_dtype: Optional[str] = None
|
||||
flux_fp8_clip: Optional[bool] = None
|
||||
|
||||
|
||||
async def comfyui_generate_image(
|
||||
model: str, payload: ImageGenerationPayload, client_id, base_url
|
||||
):
|
||||
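# ComfyUI streams progress over a websocket; derive the ws(s):// endpoint from the HTTP(S) base URL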
ws_url = base_url.replace("http://", "ws://").replace("https://", "wss://")
|
||||
|
||||
comfyui_prompt = json.loads(COMFYUI_DEFAULT_PROMPT)
|
||||
|
||||
if payload.cfg_scale:
|
||||
comfyui_prompt["3"]["inputs"]["cfg"] = payload.cfg_scale
|
||||
|
||||
if payload.sampler:
|
||||
comfyui_prompt["3"]["inputs"]["sampler_name"] = payload.sampler
|
||||
|
||||
if payload.scheduler:
|
||||
comfyui_prompt["3"]["inputs"]["scheduler"] = payload.scheduler
|
||||
|
||||
if payload.sd3:
|
||||
comfyui_prompt["5"]["class_type"] = "EmptySD3LatentImage"
|
||||
|
||||
if payload.steps:
|
||||
comfyui_prompt["3"]["inputs"]["steps"] = payload.steps
|
||||
|
||||
comfyui_prompt["4"]["inputs"]["ckpt_name"] = model
|
||||
comfyui_prompt["7"]["inputs"]["text"] = payload.negative_prompt
|
||||
comfyui_prompt["3"]["inputs"]["seed"] = (
|
||||
payload.seed if payload.seed else random.randint(0, 18446744073709551614)
|
||||
)
|
||||
|
||||
# as Flux uses a completely different workflow, we must treat it specially
|
||||
if payload.flux:
|
||||
comfyui_prompt = json.loads(FLUX_DEFAULT_PROMPT)
|
||||
comfyui_prompt["12"]["inputs"]["unet_name"] = model
|
||||
comfyui_prompt["25"]["inputs"]["noise_seed"] = (
|
||||
payload.seed if payload.seed else random.randint(0, 18446744073709551614)
|
||||
)
|
||||
|
||||
if payload.sampler:
|
||||
comfyui_prompt["16"]["inputs"]["sampler_name"] = payload.sampler
|
||||
|
||||
if payload.steps:
|
||||
comfyui_prompt["17"]["inputs"]["steps"] = payload.steps
|
||||
|
||||
if payload.scheduler:
|
||||
comfyui_prompt["17"]["inputs"]["scheduler"] = payload.scheduler
|
||||
|
||||
if payload.flux_weight_dtype:
|
||||
comfyui_prompt["12"]["inputs"]["weight_dtype"] = payload.flux_weight_dtype
|
||||
|
||||
if payload.flux_fp8_clip:
|
||||
comfyui_prompt["11"]["inputs"][
|
||||
"clip_name2"
|
||||
] = "t5xxl_fp8_e4m3fn.safetensors"
|
||||
|
||||
comfyui_prompt["5"]["inputs"]["batch_size"] = payload.n
|
||||
comfyui_prompt["5"]["inputs"]["width"] = payload.width
|
||||
comfyui_prompt["5"]["inputs"]["height"] = payload.height
|
||||
|
||||
# set the text prompt for our positive CLIPTextEncode
|
||||
comfyui_prompt["6"]["inputs"]["text"] = payload.prompt
|
||||
|
||||
try:
|
||||
ws = websocket.WebSocket()
|
||||
ws.connect(f"{ws_url}/ws?clientId={client_id}")
|
||||
log.info("WebSocket connection established.")
|
||||
except Exception as e:
|
||||
log.exception(f"Failed to connect to WebSocket server: {e}")
|
||||
return None
|
||||
|
||||
try:
|
||||
images = await asyncio.to_thread(
|
||||
get_images, ws, comfyui_prompt, client_id, base_url
|
||||
)
|
||||
except Exception as e:
|
||||
log.exception(f"Error while receiving images: {e}")
|
||||
images = None
|
||||
|
||||
ws.close()
|
||||
|
||||
return images
|
||||
@@ -1,171 +0,0 @@
|
||||
import socketio
|
||||
import asyncio
|
||||
|
||||
|
||||
from apps.webui.models.users import Users
|
||||
from utils.utils import decode_token
|
||||
|
||||
sio = socketio.AsyncServer(cors_allowed_origins=[], async_mode="asgi")
|
||||
app = socketio.ASGIApp(sio, socketio_path="/ws/socket.io")
|
||||
|
||||
# Dictionary to maintain the user pool
|
||||
|
||||
SESSION_POOL = {}
|
||||
USER_POOL = {}
|
||||
USAGE_POOL = {}
|
||||
# Timeout duration in seconds
|
||||
TIMEOUT_DURATION = 3
|
||||
|
||||
|
||||
@sio.event
|
||||
async def connect(sid, environ, auth):
|
||||
user = None
|
||||
if auth and "token" in auth:
|
||||
data = decode_token(auth["token"])
|
||||
|
||||
if data is not None and "id" in data:
|
||||
user = Users.get_user_by_id(data["id"])
|
||||
|
||||
if user:
|
||||
SESSION_POOL[sid] = user.id
|
||||
if user.id in USER_POOL:
|
||||
USER_POOL[user.id].append(sid)
|
||||
else:
|
||||
USER_POOL[user.id] = [sid]
|
||||
|
||||
print(f"user {user.name}({user.id}) connected with session ID {sid}")
|
||||
|
||||
await sio.emit("user-count", {"count": len(set(USER_POOL))})
|
||||
await sio.emit("usage", {"models": get_models_in_use()})
|
||||
|
||||
|
||||
@sio.on("user-join")
|
||||
async def user_join(sid, data):
|
||||
print("user-join", sid, data)
|
||||
|
||||
auth = data["auth"] if "auth" in data else None
|
||||
if not auth or "token" not in auth:
|
||||
return
|
||||
|
||||
data = decode_token(auth["token"])
|
||||
if data is None or "id" not in data:
|
||||
return
|
||||
|
||||
user = Users.get_user_by_id(data["id"])
|
||||
if not user:
|
||||
return
|
||||
|
||||
SESSION_POOL[sid] = user.id
|
||||
if user.id in USER_POOL:
|
||||
USER_POOL[user.id].append(sid)
|
||||
else:
|
||||
USER_POOL[user.id] = [sid]
|
||||
|
||||
print(f"user {user.name}({user.id}) connected with session ID {sid}")
|
||||
|
||||
await sio.emit("user-count", {"count": len(set(USER_POOL))})
|
||||
|
||||
|
||||
@sio.on("user-count")
|
||||
async def user_count(sid):
|
||||
await sio.emit("user-count", {"count": len(set(USER_POOL))})
|
||||
|
||||
|
||||
def get_models_in_use():
|
||||
# Aggregate all models in use
|
||||
models_in_use = []
|
||||
for model_id, data in USAGE_POOL.items():
|
||||
models_in_use.append(model_id)
|
||||
|
||||
return models_in_use
|
||||
|
||||
|
||||
@sio.on("usage")
|
||||
async def usage(sid, data):
|
||||
model_id = data["model"]
|
||||
|
||||
# Cancel previous callback if there is one
|
||||
if model_id in USAGE_POOL:
|
||||
USAGE_POOL[model_id]["callback"].cancel()
|
||||
|
||||
# Store the new usage data and task
|
||||
|
||||
if model_id in USAGE_POOL:
|
||||
USAGE_POOL[model_id]["sids"].append(sid)
|
||||
USAGE_POOL[model_id]["sids"] = list(set(USAGE_POOL[model_id]["sids"]))
|
||||
|
||||
else:
|
||||
USAGE_POOL[model_id] = {"sids": [sid]}
|
||||
|
||||
# Schedule a task to remove the usage data after TIMEOUT_DURATION
|
||||
USAGE_POOL[model_id]["callback"] = asyncio.create_task(
|
||||
remove_after_timeout(sid, model_id)
|
||||
)
|
||||
|
||||
# Broadcast the usage data to all clients
|
||||
await sio.emit("usage", {"models": get_models_in_use()})
|
||||
|
||||
|
||||
async def remove_after_timeout(sid, model_id):
|
||||
try:
|
||||
await asyncio.sleep(TIMEOUT_DURATION)
|
||||
if model_id in USAGE_POOL:
|
||||
print(USAGE_POOL[model_id]["sids"])
|
||||
USAGE_POOL[model_id]["sids"].remove(sid)
|
||||
USAGE_POOL[model_id]["sids"] = list(set(USAGE_POOL[model_id]["sids"]))
|
||||
|
||||
if len(USAGE_POOL[model_id]["sids"]) == 0:
|
||||
del USAGE_POOL[model_id]
|
||||
|
||||
# Broadcast the usage data to all clients
|
||||
await sio.emit("usage", {"models": get_models_in_use()})
|
||||
except asyncio.CancelledError:
|
||||
# Task was cancelled due to new 'usage' event
|
||||
pass
|
||||
|
||||
|
||||
@sio.event
|
||||
async def disconnect(sid):
|
||||
if sid in SESSION_POOL:
|
||||
user_id = SESSION_POOL[sid]
|
||||
del SESSION_POOL[sid]
|
||||
|
||||
USER_POOL[user_id].remove(sid)
|
||||
|
||||
if len(USER_POOL[user_id]) == 0:
|
||||
del USER_POOL[user_id]
|
||||
|
||||
await sio.emit("user-count", {"count": len(USER_POOL)})
|
||||
else:
|
||||
print(f"Unknown session ID {sid} disconnected")
|
||||
|
||||
|
||||
def get_event_emitter(request_info):
|
||||
async def __event_emitter__(event_data):
|
||||
await sio.emit(
|
||||
"chat-events",
|
||||
{
|
||||
"chat_id": request_info["chat_id"],
|
||||
"message_id": request_info["message_id"],
|
||||
"data": event_data,
|
||||
},
|
||||
to=request_info["session_id"],
|
||||
)
|
||||
|
||||
return __event_emitter__
|
||||
|
||||
|
||||
def get_event_call(request_info):
|
||||
async def __event_call__(event_data):
|
||||
response = await sio.call(
|
||||
"chat-events",
|
||||
{
|
||||
"chat_id": request_info["chat_id"],
|
||||
"message_id": request_info["message_id"],
|
||||
"data": event_data,
|
||||
},
|
||||
to=request_info["session_id"],
|
||||
)
|
||||
return response
|
||||
|
||||
return __event_call__
|
||||
@@ -1,407 +0,0 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import Union, Optional
|
||||
|
||||
import json
|
||||
import uuid
|
||||
import time
|
||||
|
||||
from sqlalchemy import Column, String, BigInteger, Boolean, Text
|
||||
|
||||
from apps.webui.internal.db import Base, get_db
|
||||
|
||||
|
||||
####################
|
||||
# Chat DB Schema
|
||||
####################
|
||||
|
||||
|
||||
class Chat(Base):
|
||||
__tablename__ = "chat"
|
||||
|
||||
id = Column(String, primary_key=True)
|
||||
user_id = Column(String)
|
||||
title = Column(Text)
|
||||
chat = Column(Text) # Save Chat JSON as Text
|
||||
|
||||
created_at = Column(BigInteger)
|
||||
updated_at = Column(BigInteger)
|
||||
|
||||
share_id = Column(Text, unique=True, nullable=True)
|
||||
archived = Column(Boolean, default=False)
|
||||
|
||||
|
||||
class ChatModel(BaseModel):
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
id: str
|
||||
user_id: str
|
||||
title: str
|
||||
chat: str
|
||||
|
||||
created_at: int # timestamp in epoch
|
||||
updated_at: int # timestamp in epoch
|
||||
|
||||
share_id: Optional[str] = None
|
||||
archived: bool = False
|
||||
|
||||
|
||||
####################
|
||||
# Forms
|
||||
####################
|
||||
|
||||
|
||||
class ChatForm(BaseModel):
|
||||
chat: dict
|
||||
|
||||
|
||||
class ChatTitleForm(BaseModel):
|
||||
title: str
|
||||
|
||||
|
||||
class ChatResponse(BaseModel):
|
||||
id: str
|
||||
user_id: str
|
||||
title: str
|
||||
chat: dict
|
||||
updated_at: int # timestamp in epoch
|
||||
created_at: int # timestamp in epoch
|
||||
share_id: Optional[str] = None # id of the chat to be shared
|
||||
archived: bool
|
||||
|
||||
|
||||
class ChatTitleIdResponse(BaseModel):
|
||||
id: str
|
||||
title: str
|
||||
updated_at: int
|
||||
created_at: int
|
||||
|
||||
|
||||
class ChatTable:
|
||||
|
||||
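# Data-access layer for the chat table; each method opens its own session via get_db()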
def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
id = str(uuid.uuid4())
|
||||
chat = ChatModel(
|
||||
**{
|
||||
"id": id,
|
||||
"user_id": user_id,
|
||||
"title": (
|
||||
form_data.chat["title"]
|
||||
if "title" in form_data.chat
|
||||
else "New Chat"
|
||||
),
|
||||
"chat": json.dumps(form_data.chat),
|
||||
"created_at": int(time.time()),
|
||||
"updated_at": int(time.time()),
|
||||
}
|
||||
)
|
||||
|
||||
result = Chat(**chat.model_dump())
|
||||
db.add(result)
|
||||
db.commit()
|
||||
db.refresh(result)
|
||||
return ChatModel.model_validate(result) if result else None
|
||||
|
||||
def update_chat_by_id(self, id: str, chat: dict) -> Optional[ChatModel]:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
chat_obj = db.get(Chat, id)
|
||||
chat_obj.chat = json.dumps(chat)
|
||||
chat_obj.title = chat["title"] if "title" in chat else "New Chat"
|
||||
chat_obj.updated_at = int(time.time())
|
||||
db.commit()
|
||||
db.refresh(chat_obj)
|
||||
|
||||
return ChatModel.model_validate(chat_obj)
|
||||
except Exception as e:
|
||||
return None
|
||||
|
||||
def insert_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
# Get the existing chat to share
|
||||
chat = db.get(Chat, chat_id)
|
||||
# Check if the chat is already shared
|
||||
if chat.share_id:
|
||||
return self.get_chat_by_id_and_user_id(chat.share_id, "shared")
|
||||
# Create a new chat with the same data, but with a new ID
|
||||
shared_chat = ChatModel(
|
||||
**{
|
||||
"id": str(uuid.uuid4()),
|
||||
"user_id": f"shared-{chat_id}",
|
||||
"title": chat.title,
|
||||
"chat": chat.chat,
|
||||
"created_at": chat.created_at,
|
||||
"updated_at": int(time.time()),
|
||||
}
|
||||
)
|
||||
shared_result = Chat(**shared_chat.model_dump())
|
||||
db.add(shared_result)
|
||||
db.commit()
|
||||
db.refresh(shared_result)
|
||||
|
||||
# Update the original chat with the share_id
|
||||
result = (
|
||||
db.query(Chat)
|
||||
.filter_by(id=chat_id)
|
||||
.update({"share_id": shared_chat.id})
|
||||
)
|
||||
db.commit()
|
||||
return shared_chat if (shared_result and result) else None
|
||||
|
||||
def update_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
print("update_shared_chat_by_id")
|
||||
chat = db.get(Chat, chat_id)
|
||||
print(chat)
|
||||
chat.title = chat.title
|
||||
chat.chat = chat.chat
|
||||
db.commit()
|
||||
db.refresh(chat)
|
||||
|
||||
return self.get_chat_by_id(chat.share_id)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def delete_shared_chat_by_chat_id(self, chat_id: str) -> bool:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
db.query(Chat).filter_by(user_id=f"shared-{chat_id}").delete()
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def update_chat_share_id_by_id(
|
||||
self, id: str, share_id: Optional[str]
|
||||
) -> Optional[ChatModel]:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
chat = db.get(Chat, id)
|
||||
chat.share_id = share_id
|
||||
db.commit()
|
||||
db.refresh(chat)
|
||||
return ChatModel.model_validate(chat)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def toggle_chat_archive_by_id(self, id: str) -> Optional[ChatModel]:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
chat = db.get(Chat, id)
|
||||
chat.archived = not chat.archived
|
||||
db.commit()
|
||||
db.refresh(chat)
|
||||
return ChatModel.model_validate(chat)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def archive_all_chats_by_user_id(self, user_id: str) -> bool:
|
||||
try:
|
||||
with get_db() as db:
|
||||
db.query(Chat).filter_by(user_id=user_id).update({"archived": True})
|
||||
db.commit()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_archived_chat_list_by_user_id(
|
||||
self, user_id: str, skip: int = 0, limit: int = 50
|
||||
) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
db.query(Chat)
|
||||
.filter_by(user_id=user_id, archived=True)
|
||||
.order_by(Chat.updated_at.desc())
|
||||
# .limit(limit).offset(skip)
|
||||
.all()
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def get_chat_list_by_user_id(
|
||||
self,
|
||||
user_id: str,
|
||||
include_archived: bool = False,
|
||||
skip: int = 0,
|
||||
limit: int = 50,
|
||||
) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
query = db.query(Chat).filter_by(user_id=user_id)
|
||||
if not include_archived:
|
||||
query = query.filter_by(archived=False)
|
||||
all_chats = (
|
||||
query.order_by(Chat.updated_at.desc())
|
||||
# .limit(limit).offset(skip)
|
||||
.all()
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def get_chat_title_id_list_by_user_id(
|
||||
self,
|
||||
user_id: str,
|
||||
include_archived: bool = False,
|
||||
skip: int = 0,
|
||||
limit: int = -1,
|
||||
) -> list[ChatTitleIdResponse]:
|
||||
with get_db() as db:
|
||||
query = db.query(Chat).filter_by(user_id=user_id)
|
||||
if not include_archived:
|
||||
query = query.filter_by(archived=False)
|
||||
|
||||
all_chats = (
|
||||
query.order_by(Chat.updated_at.desc())
|
||||
# limit cols
|
||||
.with_entities(Chat.id, Chat.title, Chat.updated_at, Chat.created_at)
|
||||
.limit(limit)
|
||||
.offset(skip)
|
||||
.all()
|
||||
)
|
||||
# The result has to be destructured from the SQLAlchemy `row` and mapped to a dict, since `ChatModel` is not the returned dataclass.
|
||||
return [
|
||||
ChatTitleIdResponse.model_validate(
|
||||
{
|
||||
"id": chat[0],
|
||||
"title": chat[1],
|
||||
"updated_at": chat[2],
|
||||
"created_at": chat[3],
|
||||
}
|
||||
)
|
||||
for chat in all_chats
|
||||
]
|
||||
|
||||
def get_chat_list_by_chat_ids(
|
||||
self, chat_ids: list[str], skip: int = 0, limit: int = 50
|
||||
) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
all_chats = (
|
||||
db.query(Chat)
|
||||
.filter(Chat.id.in_(chat_ids))
|
||||
.filter_by(archived=False)
|
||||
.order_by(Chat.updated_at.desc())
|
||||
.all()
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def get_chat_by_id(self, id: str) -> Optional[ChatModel]:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
chat = db.get(Chat, id)
|
||||
return ChatModel.model_validate(chat)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_chat_by_share_id(self, id: str) -> Optional[ChatModel]:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
chat = db.query(Chat).filter_by(share_id=id).first()
|
||||
|
||||
if chat:
|
||||
return self.get_chat_by_id(id)
|
||||
else:
|
||||
return None
|
||||
except Exception as e:
|
||||
return None
|
||||
|
||||
def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
chat = db.query(Chat).filter_by(id=id, user_id=user_id).first()
|
||||
return ChatModel.model_validate(chat)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_chats(self, skip: int = 0, limit: int = 50) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
db.query(Chat)
|
||||
# .limit(limit).offset(skip)
|
||||
.order_by(Chat.updated_at.desc())
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def get_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
db.query(Chat)
|
||||
.filter_by(user_id=user_id)
|
||||
.order_by(Chat.updated_at.desc())
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def get_archived_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
db.query(Chat)
|
||||
.filter_by(user_id=user_id, archived=True)
|
||||
.order_by(Chat.updated_at.desc())
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def delete_chat_by_id(self, id: str) -> bool:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
db.query(Chat).filter_by(id=id).delete()
|
||||
db.commit()
|
||||
|
||||
return self.delete_shared_chat_by_chat_id(id)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
|
||||
try:
|
||||
with get_db() as db:
|
||||
|
||||
db.query(Chat).filter_by(id=id, user_id=user_id).delete()
|
||||
db.commit()
|
||||
|
||||
return self.delete_shared_chat_by_chat_id(id)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_chats_by_user_id(self, user_id: str) -> bool:
|
||||
try:
|
||||
|
||||
with get_db() as db:
|
||||
|
||||
self.delete_shared_chats_by_user_id(user_id)
|
||||
|
||||
db.query(Chat).filter_by(user_id=user_id).delete()
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_shared_chats_by_user_id(self, user_id: str) -> bool:
|
||||
try:
|
||||
|
||||
with get_db() as db:
|
||||
|
||||
chats_by_user = db.query(Chat).filter_by(user_id=user_id).all()
|
||||
shared_chat_ids = [f"shared-{chat.id}" for chat in chats_by_user]
|
||||
|
||||
db.query(Chat).filter(Chat.user_id.in_(shared_chat_ids)).delete()
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
Chats = ChatTable()
@@ -1,126 +0,0 @@
from pydantic import BaseModel, ConfigDict
from typing import Union, Optional
import time
import logging

from sqlalchemy import Column, String, BigInteger, Text

from apps.webui.internal.db import JSONField, Base, get_db

import json

from config import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Files DB Schema
####################


class File(Base):
    __tablename__ = "file"

    id = Column(String, primary_key=True)
    user_id = Column(String)
    filename = Column(Text)
    meta = Column(JSONField)
    created_at = Column(BigInteger)


class FileModel(BaseModel):
    id: str
    user_id: str
    filename: str
    meta: dict
    created_at: int  # timestamp in epoch

    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################


class FileModelResponse(BaseModel):
    id: str
    user_id: str
    filename: str
    meta: dict
    created_at: int  # timestamp in epoch


class FileForm(BaseModel):
    id: str
    filename: str
    meta: dict = {}


class FilesTable:
    def insert_new_file(self, user_id: str, form_data: FileForm) -> Optional[FileModel]:
        with get_db() as db:
            file = FileModel(
                **{
                    **form_data.model_dump(),
                    "user_id": user_id,
                    "created_at": int(time.time()),
                }
            )

            try:
                result = File(**file.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return FileModel.model_validate(result)
                else:
                    return None
            except Exception as e:
                print(f"Error creating file: {e}")
                return None

    def get_file_by_id(self, id: str) -> Optional[FileModel]:
        with get_db() as db:
            try:
                file = db.get(File, id)
                return FileModel.model_validate(file)
            except Exception:
                return None

    def get_files(self) -> list[FileModel]:
        with get_db() as db:
            return [FileModel.model_validate(file) for file in db.query(File).all()]

    def delete_file_by_id(self, id: str) -> bool:
        with get_db() as db:
            try:
                db.query(File).filter_by(id=id).delete()
                db.commit()

                return True
            except Exception:
                return False

    def delete_all_files(self) -> bool:
        with get_db() as db:
            try:
                db.query(File).delete()
                db.commit()

                return True
            except Exception:
                return False


Files = FilesTable()
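
As with the chat table, `Files` is the single entry point. A short sketch of the insert-then-fetch flow; the ids and metadata here are illustrative, not values from the repository:

from apps.webui.models.files import Files, FileForm

# Hypothetical flow: register an uploaded file, then read it back.
record = Files.insert_new_file(
    "user-123",  # illustrative user id
    FileForm(id="abc", filename="abc_report.pdf", meta={"size": 1024}),
)
if record is not None:
    fetched = Files.get_file_by_id(record.id)
    assert fetched is not None and fetched.filename == "abc_report.pdf"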
@@ -1,272 +0,0 @@
from pydantic import BaseModel, ConfigDict
from typing import Optional

import json
import uuid
import time
import logging

from sqlalchemy import String, Column, BigInteger, Text

from apps.webui.internal.db import Base, get_db

from config import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Tag DB Schema
####################


class Tag(Base):
    __tablename__ = "tag"

    id = Column(String, primary_key=True)
    name = Column(String)
    user_id = Column(String)
    data = Column(Text, nullable=True)


class ChatIdTag(Base):
    __tablename__ = "chatidtag"

    id = Column(String, primary_key=True)
    tag_name = Column(String)
    chat_id = Column(String)
    user_id = Column(String)
    timestamp = Column(BigInteger)


class TagModel(BaseModel):
    id: str
    name: str
    user_id: str
    data: Optional[str] = None

    model_config = ConfigDict(from_attributes=True)


class ChatIdTagModel(BaseModel):
    id: str
    tag_name: str
    chat_id: str
    user_id: str
    timestamp: int

    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################


class ChatIdTagForm(BaseModel):
    tag_name: str
    chat_id: str


class TagChatIdsResponse(BaseModel):
    chat_ids: list[str]


class ChatTagsResponse(BaseModel):
    tags: list[str]


class TagTable:
    def insert_new_tag(self, name: str, user_id: str) -> Optional[TagModel]:
        with get_db() as db:
            id = str(uuid.uuid4())
            tag = TagModel(**{"id": id, "user_id": user_id, "name": name})
            try:
                result = Tag(**tag.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return TagModel.model_validate(result)
                else:
                    return None
            except Exception:
                return None

    def get_tag_by_name_and_user_id(self, name: str, user_id: str) -> Optional[TagModel]:
        try:
            with get_db() as db:
                tag = db.query(Tag).filter_by(name=name, user_id=user_id).first()
                return TagModel.model_validate(tag)
        except Exception:
            return None

    def add_tag_to_chat(
        self, user_id: str, form_data: ChatIdTagForm
    ) -> Optional[ChatIdTagModel]:
        tag = self.get_tag_by_name_and_user_id(form_data.tag_name, user_id)
        if tag is None:
            tag = self.insert_new_tag(form_data.tag_name, user_id)

        id = str(uuid.uuid4())
        chatIdTag = ChatIdTagModel(
            **{
                "id": id,
                "user_id": user_id,
                "chat_id": form_data.chat_id,
                "tag_name": tag.name,
                "timestamp": int(time.time()),
            }
        )
        try:
            with get_db() as db:
                result = ChatIdTag(**chatIdTag.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return ChatIdTagModel.model_validate(result)
                else:
                    return None
        except Exception:
            return None

    def get_tags_by_user_id(self, user_id: str) -> list[TagModel]:
        with get_db() as db:
            tag_names = [
                chat_id_tag.tag_name
                for chat_id_tag in (
                    db.query(ChatIdTag)
                    .filter_by(user_id=user_id)
                    .order_by(ChatIdTag.timestamp.desc())
                    .all()
                )
            ]

            return [
                TagModel.model_validate(tag)
                for tag in (
                    db.query(Tag)
                    .filter_by(user_id=user_id)
                    .filter(Tag.name.in_(tag_names))
                    .all()
                )
            ]

    def get_tags_by_chat_id_and_user_id(
        self, chat_id: str, user_id: str
    ) -> list[TagModel]:
        with get_db() as db:
            tag_names = [
                chat_id_tag.tag_name
                for chat_id_tag in (
                    db.query(ChatIdTag)
                    .filter_by(user_id=user_id, chat_id=chat_id)
                    .order_by(ChatIdTag.timestamp.desc())
                    .all()
                )
            ]

            return [
                TagModel.model_validate(tag)
                for tag in (
                    db.query(Tag)
                    .filter_by(user_id=user_id)
                    .filter(Tag.name.in_(tag_names))
                    .all()
                )
            ]

    def get_chat_ids_by_tag_name_and_user_id(
        self, tag_name: str, user_id: str
    ) -> list[ChatIdTagModel]:
        with get_db() as db:
            return [
                ChatIdTagModel.model_validate(chat_id_tag)
                for chat_id_tag in (
                    db.query(ChatIdTag)
                    .filter_by(user_id=user_id, tag_name=tag_name)
                    .order_by(ChatIdTag.timestamp.desc())
                    .all()
                )
            ]

    def count_chat_ids_by_tag_name_and_user_id(
        self, tag_name: str, user_id: str
    ) -> int:
        with get_db() as db:
            return (
                db.query(ChatIdTag)
                .filter_by(tag_name=tag_name, user_id=user_id)
                .count()
            )

    def delete_tag_by_tag_name_and_user_id(self, tag_name: str, user_id: str) -> bool:
        try:
            with get_db() as db:
                res = (
                    db.query(ChatIdTag)
                    .filter_by(tag_name=tag_name, user_id=user_id)
                    .delete()
                )
                log.debug(f"res: {res}")
                db.commit()

                tag_count = self.count_chat_ids_by_tag_name_and_user_id(
                    tag_name, user_id
                )
                if tag_count == 0:
                    # Remove tag item from Tag col as well
                    db.query(Tag).filter_by(name=tag_name, user_id=user_id).delete()
                    db.commit()
                return True
        except Exception as e:
            log.error(f"delete_tag: {e}")
            return False

    def delete_tag_by_tag_name_and_chat_id_and_user_id(
        self, tag_name: str, chat_id: str, user_id: str
    ) -> bool:
        try:
            with get_db() as db:
                res = (
                    db.query(ChatIdTag)
                    .filter_by(tag_name=tag_name, chat_id=chat_id, user_id=user_id)
                    .delete()
                )
                log.debug(f"res: {res}")
                db.commit()

                tag_count = self.count_chat_ids_by_tag_name_and_user_id(
                    tag_name, user_id
                )
                if tag_count == 0:
                    # Remove tag item from Tag col as well
                    db.query(Tag).filter_by(name=tag_name, user_id=user_id).delete()
                    db.commit()

                return True
        except Exception as e:
            log.error(f"delete_tag: {e}")
            return False

    def delete_tags_by_chat_id_and_user_id(self, chat_id: str, user_id: str) -> bool:
        tags = self.get_tags_by_chat_id_and_user_id(chat_id, user_id)

        for tag in tags:
            # TagModel exposes the tag's name as `name`, not `tag_name`
            self.delete_tag_by_tag_name_and_chat_id_and_user_id(
                tag.name, chat_id, user_id
            )

        return True


Tags = TagTable()
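
Tags are reference-counted through the chatidtag link table: the Tag row itself is created lazily by add_tag_to_chat and dropped once count_chat_ids_by_tag_name_and_user_id hits zero. A sketch of the flow, with illustrative ids:

from apps.webui.models.tags import Tags, ChatIdTagForm

# Hypothetical flow: tag a chat, then query by tag. The Tag row is
# created on first use, so no separate insert call is needed.
Tags.add_tag_to_chat("user-123", ChatIdTagForm(tag_name="work", chat_id="chat-1"))
work_chats = Tags.get_chat_ids_by_tag_name_and_user_id("work", "user-123")
count = Tags.count_chat_ids_by_tag_name_and_user_id("work", "user-123")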
@@ -1,241 +0,0 @@
from fastapi import (
    Depends,
    FastAPI,
    HTTPException,
    status,
    Request,
    UploadFile,
    File,
    Form,
)

from datetime import datetime, timedelta
from typing import Union, Optional
from pathlib import Path

from fastapi import APIRouter
from fastapi.responses import StreamingResponse, JSONResponse, FileResponse

from pydantic import BaseModel
import json

from apps.webui.models.files import (
    Files,
    FileForm,
    FileModel,
    FileModelResponse,
)
from utils.utils import get_verified_user, get_admin_user
from constants import ERROR_MESSAGES

from importlib import util
import os
import shutil
import logging
import re
import uuid

from config import SRC_LOG_LEVELS, UPLOAD_DIR

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

router = APIRouter()

############################
# Upload File
############################


@router.post("/")
def upload_file(file: UploadFile = File(...), user=Depends(get_verified_user)):
    log.info(f"file.content_type: {file.content_type}")
    try:
        unsanitized_filename = file.filename
        filename = os.path.basename(unsanitized_filename)

        # replace filename with uuid
        id = str(uuid.uuid4())
        name = filename
        filename = f"{id}_{filename}"
        file_path = f"{UPLOAD_DIR}/{filename}"

        contents = file.file.read()
        with open(file_path, "wb") as f:
            f.write(contents)

        file = Files.insert_new_file(
            user.id,
            FileForm(
                **{
                    "id": id,
                    "filename": filename,
                    "meta": {
                        "name": name,
                        "content_type": file.content_type,
                        "size": len(contents),
                        "path": file_path,
                    },
                }
            ),
        )

        if file:
            return file
        else:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.DEFAULT("Error uploading file"),
            )

    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )


############################
# List Files
############################


@router.get("/", response_model=list[FileModel])
async def list_files(user=Depends(get_verified_user)):
    files = Files.get_files()
    return files


############################
# Delete All Files
############################


@router.delete("/all")
async def delete_all_files(user=Depends(get_admin_user)):
    result = Files.delete_all_files()

    if result:
        folder = f"{UPLOAD_DIR}"
        try:
            # Check if the directory exists
            if os.path.exists(folder):
                # Iterate over all the files and directories in the specified directory
                for filename in os.listdir(folder):
                    file_path = os.path.join(folder, filename)
                    try:
                        if os.path.isfile(file_path) or os.path.islink(file_path):
                            os.unlink(file_path)  # Remove the file or link
                        elif os.path.isdir(file_path):
                            shutil.rmtree(file_path)  # Remove the directory
                    except Exception as e:
                        print(f"Failed to delete {file_path}. Reason: {e}")
            else:
                print(f"The directory {folder} does not exist")
        except Exception as e:
            print(f"Failed to process the directory {folder}. Reason: {e}")

        return {"message": "All files deleted successfully"}
    else:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT("Error deleting files"),
        )


############################
# Get File By Id
############################


@router.get("/{id}", response_model=Optional[FileModel])
async def get_file_by_id(id: str, user=Depends(get_verified_user)):
    file = Files.get_file_by_id(id)

    if file:
        return file
    else:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=ERROR_MESSAGES.NOT_FOUND,
        )


############################
# Get File Content By Id
############################


@router.get("/{id}/content", response_model=Optional[FileModel])
async def get_file_content_by_id(id: str, user=Depends(get_verified_user)):
    file = Files.get_file_by_id(id)

    if file:
        file_path = Path(file.meta["path"])

        # Check if the file already exists in the cache
        if file_path.is_file():
            print(f"file_path: {file_path}")
            return FileResponse(file_path)
        else:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=ERROR_MESSAGES.NOT_FOUND,
            )
    else:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=ERROR_MESSAGES.NOT_FOUND,
        )


@router.get("/{id}/content/{file_name}", response_model=Optional[FileModel])
async def get_file_content_by_id_and_name(id: str, user=Depends(get_verified_user)):
    file = Files.get_file_by_id(id)

    if file:
        file_path = Path(file.meta["path"])

        # Check if the file already exists in the cache
        if file_path.is_file():
            print(f"file_path: {file_path}")
            return FileResponse(file_path)
        else:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=ERROR_MESSAGES.NOT_FOUND,
            )
    else:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=ERROR_MESSAGES.NOT_FOUND,
        )


############################
# Delete File By Id
############################


@router.delete("/{id}")
async def delete_file_by_id(id: str, user=Depends(get_verified_user)):
    file = Files.get_file_by_id(id)

    if file:
        result = Files.delete_file_by_id(id)
        if result:
            return {"message": "File deleted successfully"}
        else:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.DEFAULT("Error deleting file"),
            )
    else:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=ERROR_MESSAGES.NOT_FOUND,
        )
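
The routes above are thin wrappers over the Files table. A sketch of exercising the upload and content routes from a client; the base URL, mount prefix, and token are placeholder assumptions, not values from the repository:

import requests

BASE_URL = "http://localhost:8080/api/v1/files"  # assumed mount point
TOKEN = "sk-..."  # placeholder bearer token

with open("report.pdf", "rb") as fh:
    r = requests.post(
        f"{BASE_URL}/",
        headers={"Authorization": f"Bearer {TOKEN}"},
        files={"file": ("report.pdf", fh, "application/pdf")},
    )
r.raise_for_status()
file_id = r.json()["id"]

# The stored content is then served back by the /{id}/content route.
content = requests.get(
    f"{BASE_URL}/{file_id}/content",
    headers={"Authorization": f"Bearer {TOKEN}"},
)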
@@ -1,148 +0,0 @@
from pathlib import Path
import site

from fastapi import APIRouter, UploadFile, File, Response
from fastapi import Depends, HTTPException, status
from starlette.responses import StreamingResponse, FileResponse
from pydantic import BaseModel

from fpdf import FPDF
import markdown
import black

from utils.utils import get_admin_user
from utils.misc import calculate_sha256, get_gravatar_url

from config import OLLAMA_BASE_URLS, DATA_DIR, UPLOAD_DIR, ENABLE_ADMIN_EXPORT
from constants import ERROR_MESSAGES

router = APIRouter()


@router.get("/gravatar")
async def get_gravatar(
    email: str,
):
    return get_gravatar_url(email)


class CodeFormatRequest(BaseModel):
    code: str


@router.post("/code/format")
async def format_code(request: CodeFormatRequest):
    try:
        formatted_code = black.format_str(request.code, mode=black.Mode())
        return {"code": formatted_code}
    except black.NothingChanged:
        return {"code": request.code}
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))


class MarkdownForm(BaseModel):
    md: str


@router.post("/markdown")
async def get_html_from_markdown(
    form_data: MarkdownForm,
):
    return {"html": markdown.markdown(form_data.md)}


class ChatForm(BaseModel):
    title: str
    messages: list[dict]


@router.post("/pdf")
async def download_chat_as_pdf(
    form_data: ChatForm,
):
    pdf = FPDF()
    pdf.add_page()

    # When running in docker, workdir is /app/backend, so fonts is in /app/backend/static/fonts
    FONTS_DIR = Path("./static/fonts")

    # Non-Docker installation:

    # When running using `pip install`, the static directory is in the site packages.
    if not FONTS_DIR.exists():
        FONTS_DIR = Path(site.getsitepackages()[0]) / "static/fonts"
    # When running using `pip install -e .`, the static directory is in the site packages.
    # This path only works if `open-webui serve` is run from the root of this project.
    if not FONTS_DIR.exists():
        FONTS_DIR = Path("./backend/static/fonts")

    pdf.add_font("NotoSans", "", f"{FONTS_DIR}/NotoSans-Regular.ttf")
    pdf.add_font("NotoSans", "b", f"{FONTS_DIR}/NotoSans-Bold.ttf")
    pdf.add_font("NotoSans", "i", f"{FONTS_DIR}/NotoSans-Italic.ttf")
    pdf.add_font("NotoSansKR", "", f"{FONTS_DIR}/NotoSansKR-Regular.ttf")
    pdf.add_font("NotoSansJP", "", f"{FONTS_DIR}/NotoSansJP-Regular.ttf")

    pdf.set_font("NotoSans", size=12)
    pdf.set_fallback_fonts(["NotoSansKR", "NotoSansJP"])

    pdf.set_auto_page_break(auto=True, margin=15)

    # Adjust the effective page width for multi_cell
    effective_page_width = (
        pdf.w - 2 * pdf.l_margin - 10
    )  # Subtracted an additional 10 for extra padding

    # Add chat messages
    for message in form_data.messages:
        role = message["role"]
        content = message["content"]
        pdf.set_font("NotoSans", "B", size=14)  # Bold for the role
        pdf.multi_cell(effective_page_width, 10, f"{role.upper()}", 0, "L")
        pdf.ln(1)  # Extra space between messages

        pdf.set_font("NotoSans", size=10)  # Regular for content
        pdf.multi_cell(effective_page_width, 6, content, 0, "L")
        pdf.ln(1.5)  # Extra space between messages

    # Save the pdf with name .pdf
    pdf_bytes = pdf.output()

    return Response(
        content=bytes(pdf_bytes),
        media_type="application/pdf",
        headers={"Content-Disposition": "attachment;filename=chat.pdf"},
    )


@router.get("/db/download")
async def download_db(user=Depends(get_admin_user)):
    if not ENABLE_ADMIN_EXPORT:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
        )
    from apps.webui.internal.db import engine

    if engine.name != "sqlite":
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DB_NOT_SQLITE,
        )
    return FileResponse(
        engine.url.database,
        media_type="application/octet-stream",
        filename="webui.db",
    )


@router.get("/litellm/config")
async def download_litellm_config_yaml(user=Depends(get_admin_user)):
    return FileResponse(
        f"{DATA_DIR}/litellm/config.yaml",
        media_type="application/octet-stream",
        filename="config.yaml",
    )
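
A sketch of driving the /pdf route from a client; the URL and token are placeholders for wherever this router happens to be mounted:

import requests

# Hypothetical client call for the chat-to-PDF endpoint above.
payload = {
    "title": "Demo chat",
    "messages": [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi there!"},
    ],
}
r = requests.post(
    "http://localhost:8080/api/v1/utils/pdf",  # assumed mount point
    json=payload,
    headers={"Authorization": "Bearer sk-..."},  # placeholder token
)
r.raise_for_status()
with open("chat.pdf", "wb") as f:
    f.write(r.content)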
@@ -1,104 +0,0 @@
from importlib import util
import os
import re
import sys
import subprocess

from config import TOOLS_DIR, FUNCTIONS_DIR


def extract_frontmatter(file_path):
    """
    Extract frontmatter as a dictionary from the specified file path.
    """
    frontmatter = {}
    frontmatter_started = False
    frontmatter_ended = False
    frontmatter_pattern = re.compile(r"^\s*([a-z_]+):\s*(.*)\s*$", re.IGNORECASE)

    try:
        with open(file_path, "r", encoding="utf-8") as file:
            first_line = file.readline()
            if first_line.strip() != '"""':
                # The file doesn't start with triple quotes
                return {}

            frontmatter_started = True

            for line in file:
                if '"""' in line:
                    if frontmatter_started:
                        frontmatter_ended = True
                        break

                if frontmatter_started and not frontmatter_ended:
                    match = frontmatter_pattern.match(line)
                    if match:
                        key, value = match.groups()
                        frontmatter[key.strip()] = value.strip()

    except FileNotFoundError:
        print(f"Error: The file {file_path} does not exist.")
        return {}
    except Exception as e:
        print(f"An error occurred: {e}")
        return {}

    return frontmatter


def load_toolkit_module_by_id(toolkit_id):
    toolkit_path = os.path.join(TOOLS_DIR, f"{toolkit_id}.py")
    spec = util.spec_from_file_location(toolkit_id, toolkit_path)
    module = util.module_from_spec(spec)
    frontmatter = extract_frontmatter(toolkit_path)

    try:
        install_frontmatter_requirements(frontmatter.get("requirements", ""))
        spec.loader.exec_module(module)
        print(f"Loaded module: {module.__name__}")
        if hasattr(module, "Tools"):
            return module.Tools(), frontmatter
        else:
            raise Exception("No Tools class found")
    except Exception as e:
        print(f"Error loading module: {toolkit_id}")
        # Rename the file with an .error suffix so it is skipped next time
        os.rename(toolkit_path, f"{toolkit_path}.error")
        raise e


def load_function_module_by_id(function_id):
    function_path = os.path.join(FUNCTIONS_DIR, f"{function_id}.py")

    spec = util.spec_from_file_location(function_id, function_path)
    module = util.module_from_spec(spec)
    frontmatter = extract_frontmatter(function_path)

    try:
        install_frontmatter_requirements(frontmatter.get("requirements", ""))
        spec.loader.exec_module(module)
        print(f"Loaded module: {module.__name__}")
        if hasattr(module, "Pipe"):
            return module.Pipe(), "pipe", frontmatter
        elif hasattr(module, "Filter"):
            return module.Filter(), "filter", frontmatter
        elif hasattr(module, "Action"):
            return module.Action(), "action", frontmatter
        else:
            raise Exception("No Function class found")
    except Exception as e:
        print(f"Error loading module: {function_id}")
        # Rename the file with an .error suffix so it is skipped next time
        os.rename(function_path, f"{function_path}.error")
        raise e


def install_frontmatter_requirements(requirements):
    if requirements:
        req_list = [req.strip() for req in requirements.split(",")]
        for req in req_list:
            print(f"Installing requirement: {req}")
            subprocess.check_call([sys.executable, "-m", "pip", "install", req])
    else:
        print("No requirements found in frontmatter.")
@@ -1,36 +0,0 @@
{
    "version": 0,
    "ui": {
        "default_locale": "",
        "prompt_suggestions": [
            {
                "title": ["Help me study", "vocabulary for a college entrance exam"],
                "content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option."
            },
            {
                "title": ["Give me ideas", "for what to do with my kids' art"],
                "content": "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter."
            },
            {
                "title": ["Tell me a fun fact", "about the Roman Empire"],
                "content": "Tell me a random fun fact about the Roman Empire"
            },
            {
                "title": ["Show me a code snippet", "of a website's sticky header"],
                "content": "Show me a code snippet of a website's sticky header in CSS and JavaScript."
            },
            {
                "title": ["Explain options trading", "if I'm familiar with buying and selling stocks"],
                "content": "Explain options trading in simple terms if I'm familiar with buying and selling stocks."
            },
            {
                "title": ["Overcome procrastination", "give me tips"],
                "content": "Could you start by asking me about instances when I procrastinate the most and then give me some suggestions to overcome it?"
            },
            {
                "title": ["Grammar check", "rewrite it for better readability "],
                "content": "Check the following sentence for grammar and clarity: \"[sentence]\". Rewrite it for better readability while maintaining its original meaning."
            }
        ]
    }
}
@@ -1,4 +0,0 @@
general_settings: {}
litellm_settings: {}
model_list: []
router_settings: {}
@@ -1 +1 @@
-dir for backend files (db, documents, etc.)
+docker dir for backend files (db, documents, etc.)
@@ -1,2 +1,2 @@
 PORT="${PORT:-8080}"
-uvicorn main:app --port $PORT --host 0.0.0.0 --forwarded-allow-ips '*' --reload
+uvicorn open_webui.main:app --port $PORT --host 0.0.0.0 --forwarded-allow-ips '*' --reload
@@ -9,8 +9,6 @@ import uvicorn
 app = typer.Typer()

 KEY_FILE = Path.cwd() / ".webui_secret_key"
-if (frontend_build_dir := Path(__file__).parent / "frontend").exists():
-    os.environ["FRONTEND_BUILD_DIR"] = str(frontend_build_dir)


 @app.command()
@@ -18,6 +16,7 @@ def serve(
     host: str = "0.0.0.0",
     port: int = 8080,
 ):
+    os.environ["FROM_INIT_PY"] = "true"
     if os.getenv("WEBUI_SECRET_KEY") is None:
         typer.echo(
             "Loading WEBUI_SECRET_KEY from file, not provided as an environment variable."
@@ -40,9 +39,23 @@ def serve(
             "/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib",
         ]
     )
-    import main  # we need set environment variables before importing main
+    try:
+        import torch

-    uvicorn.run(main.app, host=host, port=port, forwarded_allow_ips="*")
+        assert torch.cuda.is_available(), "CUDA not available"
+        typer.echo("CUDA seems to be working")
+    except Exception as e:
+        typer.echo(
+            "Error when testing CUDA but USE_CUDA_DOCKER is true. "
+            "Resetting USE_CUDA_DOCKER to false and removing "
+            f"LD_LIBRARY_PATH modifications: {e}"
+        )
+        os.environ["USE_CUDA_DOCKER"] = "false"
+        os.environ["LD_LIBRARY_PATH"] = ":".join(LD_LIBRARY_PATH)
+
+    import open_webui.main  # we need set environment variables before importing main
+
+    uvicorn.run(open_webui.main.app, host=host, port=port, forwarded_allow_ips="*")


 @app.command()
@@ -52,7 +65,11 @@ def dev(
     reload: bool = True,
 ):
     uvicorn.run(
-        "main:app", host=host, port=port, reload=reload, forwarded_allow_ips="*"
+        "open_webui.main:app",
+        host=host,
+        port=port,
+        reload=reload,
+        forwarded_allow_ips="*",
     )
@@ -1,65 +1,56 @@
import os
import logging
from fastapi import (
    FastAPI,
    Request,
    Depends,
    HTTPException,
    status,
    UploadFile,
    File,
    Form,
)
from fastapi.responses import StreamingResponse, JSONResponse, FileResponse

from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

import uuid
import requests
import hashlib
from pathlib import Path
import json
import logging
import os
import uuid
from functools import lru_cache
from pathlib import Path
from pydub import AudioSegment
from pydub.silence import split_on_silence

from constants import ERROR_MESSAGES
from utils.utils import (
    decode_token,
    get_current_user,
    get_verified_user,
    get_admin_user,
)
from utils.misc import calculate_sha256

from config import (
    SRC_LOG_LEVELS,
    CACHE_DIR,
    UPLOAD_DIR,
    WHISPER_MODEL,
    WHISPER_MODEL_DIR,
    WHISPER_MODEL_AUTO_UPDATE,
    DEVICE_TYPE,
    AUDIO_STT_OPENAI_API_BASE_URL,
    AUDIO_STT_OPENAI_API_KEY,
    AUDIO_TTS_OPENAI_API_BASE_URL,
    AUDIO_TTS_OPENAI_API_KEY,
    AUDIO_TTS_API_KEY,
import requests
from open_webui.config import (
    AUDIO_STT_ENGINE,
    AUDIO_STT_MODEL,
    AUDIO_STT_OPENAI_API_BASE_URL,
    AUDIO_STT_OPENAI_API_KEY,
    AUDIO_TTS_API_KEY,
    AUDIO_TTS_ENGINE,
    AUDIO_TTS_MODEL,
    AUDIO_TTS_OPENAI_API_BASE_URL,
    AUDIO_TTS_OPENAI_API_KEY,
    AUDIO_TTS_SPLIT_ON,
    AUDIO_TTS_VOICE,
    AUDIO_TTS_AZURE_SPEECH_REGION,
    AUDIO_TTS_AZURE_SPEECH_OUTPUT_FORMAT,
    CACHE_DIR,
    CORS_ALLOW_ORIGIN,
    WHISPER_MODEL,
    WHISPER_MODEL_AUTO_UPDATE,
    WHISPER_MODEL_DIR,
    AppConfig,
)

from open_webui.constants import ERROR_MESSAGES
from open_webui.env import SRC_LOG_LEVELS, DEVICE_TYPE
from fastapi import Depends, FastAPI, File, HTTPException, Request, UploadFile, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from pydantic import BaseModel
from open_webui.utils.utils import get_admin_user, get_verified_user

# Constants
MAX_FILE_SIZE_MB = 25
MAX_FILE_SIZE = MAX_FILE_SIZE_MB * 1024 * 1024  # Convert MB to bytes

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["AUDIO"])

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_origins=CORS_ALLOW_ORIGIN,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
@@ -72,12 +63,19 @@ app.state.config.STT_OPENAI_API_KEY = AUDIO_STT_OPENAI_API_KEY
app.state.config.STT_ENGINE = AUDIO_STT_ENGINE
app.state.config.STT_MODEL = AUDIO_STT_MODEL

app.state.config.WHISPER_MODEL = WHISPER_MODEL
app.state.faster_whisper_model = None

app.state.config.TTS_OPENAI_API_BASE_URL = AUDIO_TTS_OPENAI_API_BASE_URL
app.state.config.TTS_OPENAI_API_KEY = AUDIO_TTS_OPENAI_API_KEY
app.state.config.TTS_ENGINE = AUDIO_TTS_ENGINE
app.state.config.TTS_MODEL = AUDIO_TTS_MODEL
app.state.config.TTS_VOICE = AUDIO_TTS_VOICE
app.state.config.TTS_API_KEY = AUDIO_TTS_API_KEY
app.state.config.TTS_SPLIT_ON = AUDIO_TTS_SPLIT_ON

app.state.config.TTS_AZURE_SPEECH_REGION = AUDIO_TTS_AZURE_SPEECH_REGION
app.state.config.TTS_AZURE_SPEECH_OUTPUT_FORMAT = AUDIO_TTS_AZURE_SPEECH_OUTPUT_FORMAT

# setting device type for whisper model
whisper_device_type = DEVICE_TYPE if DEVICE_TYPE and DEVICE_TYPE == "cuda" else "cpu"
@@ -87,6 +85,31 @@ SPEECH_CACHE_DIR = Path(CACHE_DIR).joinpath("./audio/speech/")
SPEECH_CACHE_DIR.mkdir(parents=True, exist_ok=True)


def set_faster_whisper_model(model: str, auto_update: bool = False):
    if model and app.state.config.STT_ENGINE == "":
        from faster_whisper import WhisperModel

        faster_whisper_kwargs = {
            "model_size_or_path": model,
            "device": whisper_device_type,
            "compute_type": "int8",
            "download_root": WHISPER_MODEL_DIR,
            "local_files_only": not auto_update,
        }

        try:
            app.state.faster_whisper_model = WhisperModel(**faster_whisper_kwargs)
        except Exception:
            log.warning(
                "WhisperModel initialization failed, attempting download with local_files_only=False"
            )
            faster_whisper_kwargs["local_files_only"] = False
            app.state.faster_whisper_model = WhisperModel(**faster_whisper_kwargs)

    else:
        app.state.faster_whisper_model = None


class TTSConfigForm(BaseModel):
    OPENAI_API_BASE_URL: str
    OPENAI_API_KEY: str
@@ -94,6 +117,9 @@ class TTSConfigForm(BaseModel):
    ENGINE: str
    MODEL: str
    VOICE: str
    SPLIT_ON: str
    AZURE_SPEECH_REGION: str
    AZURE_SPEECH_OUTPUT_FORMAT: str


class STTConfigForm(BaseModel):
@@ -101,6 +127,7 @@ class STTConfigForm(BaseModel):
    OPENAI_API_KEY: str
    ENGINE: str
    MODEL: str
    WHISPER_MODEL: str


class AudioConfigUpdateForm(BaseModel):
@@ -145,12 +172,16 @@ async def get_audio_config(user=Depends(get_admin_user)):
            "ENGINE": app.state.config.TTS_ENGINE,
            "MODEL": app.state.config.TTS_MODEL,
            "VOICE": app.state.config.TTS_VOICE,
            "SPLIT_ON": app.state.config.TTS_SPLIT_ON,
            "AZURE_SPEECH_REGION": app.state.config.TTS_AZURE_SPEECH_REGION,
            "AZURE_SPEECH_OUTPUT_FORMAT": app.state.config.TTS_AZURE_SPEECH_OUTPUT_FORMAT,
        },
        "stt": {
            "OPENAI_API_BASE_URL": app.state.config.STT_OPENAI_API_BASE_URL,
            "OPENAI_API_KEY": app.state.config.STT_OPENAI_API_KEY,
            "ENGINE": app.state.config.STT_ENGINE,
            "MODEL": app.state.config.STT_MODEL,
            "WHISPER_MODEL": app.state.config.WHISPER_MODEL,
        },
    }
@@ -165,11 +196,18 @@ async def update_audio_config(
    app.state.config.TTS_ENGINE = form_data.tts.ENGINE
    app.state.config.TTS_MODEL = form_data.tts.MODEL
    app.state.config.TTS_VOICE = form_data.tts.VOICE
    app.state.config.TTS_SPLIT_ON = form_data.tts.SPLIT_ON
    app.state.config.TTS_AZURE_SPEECH_REGION = form_data.tts.AZURE_SPEECH_REGION
    app.state.config.TTS_AZURE_SPEECH_OUTPUT_FORMAT = (
        form_data.tts.AZURE_SPEECH_OUTPUT_FORMAT
    )

    app.state.config.STT_OPENAI_API_BASE_URL = form_data.stt.OPENAI_API_BASE_URL
    app.state.config.STT_OPENAI_API_KEY = form_data.stt.OPENAI_API_KEY
    app.state.config.STT_ENGINE = form_data.stt.ENGINE
    app.state.config.STT_MODEL = form_data.stt.MODEL
    app.state.config.WHISPER_MODEL = form_data.stt.WHISPER_MODEL
    set_faster_whisper_model(form_data.stt.WHISPER_MODEL, WHISPER_MODEL_AUTO_UPDATE)

    return {
        "tts": {
@@ -179,12 +217,16 @@ async def update_audio_config(
            "ENGINE": app.state.config.TTS_ENGINE,
            "MODEL": app.state.config.TTS_MODEL,
            "VOICE": app.state.config.TTS_VOICE,
            "SPLIT_ON": app.state.config.TTS_SPLIT_ON,
            "AZURE_SPEECH_REGION": app.state.config.TTS_AZURE_SPEECH_REGION,
            "AZURE_SPEECH_OUTPUT_FORMAT": app.state.config.TTS_AZURE_SPEECH_OUTPUT_FORMAT,
        },
        "stt": {
            "OPENAI_API_BASE_URL": app.state.config.STT_OPENAI_API_BASE_URL,
            "OPENAI_API_KEY": app.state.config.STT_OPENAI_API_KEY,
            "ENGINE": app.state.config.STT_ENGINE,
            "MODEL": app.state.config.STT_MODEL,
            "WHISPER_MODEL": app.state.config.WHISPER_MODEL,
        },
    }
@@ -211,7 +253,7 @@ async def speech(request: Request, user=Depends(get_verified_user)):
        body = json.loads(body)
        body["model"] = app.state.config.TTS_MODEL
        body = json.dumps(body).encode("utf-8")
    except Exception as e:
    except Exception:
        pass

    r = None
@@ -261,6 +303,13 @@ async def speech(request: Request, user=Depends(get_verified_user)):
            raise HTTPException(status_code=400, detail="Invalid JSON payload")

        voice_id = payload.get("voice", "")

        if voice_id not in get_available_voices():
            raise HTTPException(
                status_code=400,
                detail="Invalid voice id",
            )

        url = f"https://api.elevenlabs.io/v1/text-to-speech/{voice_id}"

        headers = {
@@ -307,68 +356,96 @@ async def speech(request: Request, user=Depends(get_verified_user)):
                detail=error_detail,
            )

    elif app.state.config.TTS_ENGINE == "azure":
        payload = None
        try:
            payload = json.loads(body.decode("utf-8"))
        except Exception as e:
            log.exception(e)
            raise HTTPException(status_code=400, detail="Invalid JSON payload")

@app.post("/transcriptions")
def transcribe(
    file: UploadFile = File(...),
    user=Depends(get_current_user),
):
    log.info(f"file.content_type: {file.content_type}")
        region = app.state.config.TTS_AZURE_SPEECH_REGION
        language = app.state.config.TTS_VOICE
        locale = "-".join(app.state.config.TTS_VOICE.split("-")[:1])
        output_format = app.state.config.TTS_AZURE_SPEECH_OUTPUT_FORMAT
        url = f"https://{region}.tts.speech.microsoft.com/cognitiveservices/v1"

    if file.content_type not in ["audio/mpeg", "audio/wav"]:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.FILE_NOT_SUPPORTED,
        )
        headers = {
            "Ocp-Apim-Subscription-Key": app.state.config.TTS_API_KEY,
            "Content-Type": "application/ssml+xml",
            "X-Microsoft-OutputFormat": output_format,
        }

    try:
        ext = file.filename.split(".")[-1]
        data = f"""<speak version="1.0" xmlns="http://www.w3.org/2001/10/synthesis" xml:lang="{locale}">
                <voice name="{language}">{payload["input"]}</voice>
            </speak>"""

        id = uuid.uuid4()
        filename = f"{id}.{ext}"
        response = requests.post(url, headers=headers, data=data)

        file_dir = f"{CACHE_DIR}/audio/transcriptions"
        os.makedirs(file_dir, exist_ok=True)
        file_path = f"{file_dir}/{filename}"

        print(filename)

        contents = file.file.read()
        with open(file_path, "wb") as f:
            f.write(contents)

        if app.state.config.STT_ENGINE == "":
            from faster_whisper import WhisperModel

            whisper_kwargs = {
                "model_size_or_path": WHISPER_MODEL,
                "device": whisper_device_type,
                "compute_type": "int8",
                "download_root": WHISPER_MODEL_DIR,
                "local_files_only": not WHISPER_MODEL_AUTO_UPDATE,
            }

            log.debug(f"whisper_kwargs: {whisper_kwargs}")

            try:
                model = WhisperModel(**whisper_kwargs)
            except Exception:
                log.warning(
                    "WhisperModel initialization failed, attempting download with local_files_only=False"
                )
                whisper_kwargs["local_files_only"] = False
                model = WhisperModel(**whisper_kwargs)

            segments, info = model.transcribe(file_path, beam_size=5)
            log.info(
                "Detected language '%s' with probability %f"
                % (info.language, info.language_probability)
        if response.status_code == 200:
            with open(file_path, "wb") as f:
                f.write(response.content)
            return FileResponse(file_path)
        else:
            log.error(f"Error synthesizing speech - {response.reason}")
            raise HTTPException(
                status_code=500, detail=f"Error synthesizing speech - {response.reason}"
            )

            transcript = "".join([segment.text for segment in list(segments)])

            data = {"text": transcript.strip()}
def transcribe(file_path):
    print("transcribe", file_path)
    filename = os.path.basename(file_path)
    file_dir = os.path.dirname(file_path)
    id = filename.split(".")[0]

    if app.state.config.STT_ENGINE == "":
        if app.state.faster_whisper_model is None:
            set_faster_whisper_model(app.state.config.WHISPER_MODEL)

        model = app.state.faster_whisper_model
        segments, info = model.transcribe(file_path, beam_size=5)
        log.info(
            "Detected language '%s' with probability %f"
            % (info.language, info.language_probability)
        )

        transcript = "".join([segment.text for segment in list(segments)])
        data = {"text": transcript.strip()}

        # save the transcript to a json file
        transcript_file = f"{file_dir}/{id}.json"
        with open(transcript_file, "w") as f:
            json.dump(data, f)

        log.debug(data)
        return data
    elif app.state.config.STT_ENGINE == "openai":
        if is_mp4_audio(file_path):
            print("is_mp4_audio")
            os.rename(file_path, file_path.replace(".wav", ".mp4"))
            # Convert MP4 audio file to WAV format
            convert_mp4_to_wav(file_path.replace(".wav", ".mp4"), file_path)

        headers = {"Authorization": f"Bearer {app.state.config.STT_OPENAI_API_KEY}"}

        files = {"file": (filename, open(file_path, "rb"))}
        data = {"model": app.state.config.STT_MODEL}

        log.debug(files, data)

        r = None
        try:
            r = requests.post(
                url=f"{app.state.config.STT_OPENAI_API_BASE_URL}/audio/transcriptions",
                headers=headers,
                files=files,
                data=data,
            )

            r.raise_for_status()

            data = r.json()

            # save the transcript to a json file
            transcript_file = f"{file_dir}/{id}.json"
@@ -376,58 +453,82 @@ def transcribe(
            with open(transcript_file, "w") as f:
                json.dump(data, f)

            print(data)
            return data
        except Exception as e:
            log.exception(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"External: {res['error']['message']}"
                except Exception:
                    error_detail = f"External: {e}"

            raise Exception(error_detail)


@app.post("/transcriptions")
def transcription(
    file: UploadFile = File(...),
    user=Depends(get_verified_user),
):
    log.info(f"file.content_type: {file.content_type}")

    if file.content_type not in ["audio/mpeg", "audio/wav", "audio/ogg", "audio/x-m4a"]:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.FILE_NOT_SUPPORTED,
        )

    try:
        ext = file.filename.split(".")[-1]
        id = uuid.uuid4()

        filename = f"{id}.{ext}"
        contents = file.file.read()

        file_dir = f"{CACHE_DIR}/audio/transcriptions"
        os.makedirs(file_dir, exist_ok=True)
        file_path = f"{file_dir}/{filename}"

        with open(file_path, "wb") as f:
            f.write(contents)

        try:
            if os.path.getsize(file_path) > MAX_FILE_SIZE:  # file is bigger than 25MB
                log.debug(f"File size is larger than {MAX_FILE_SIZE_MB}MB")
                audio = AudioSegment.from_file(file_path)
                audio = audio.set_frame_rate(16000).set_channels(1)  # Compress audio
                compressed_path = f"{file_dir}/{id}_compressed.opus"
                audio.export(compressed_path, format="opus", bitrate="32k")
                log.debug(f"Compressed audio to {compressed_path}")
                file_path = compressed_path

                if (
                    os.path.getsize(file_path) > MAX_FILE_SIZE
                ):  # Still larger than 25MB after compression
                    log.debug(
                        f"Compressed file size is still larger than {MAX_FILE_SIZE_MB}MB: {os.path.getsize(file_path)}"
                    )
                    raise HTTPException(
                        status_code=status.HTTP_400_BAD_REQUEST,
                        detail=ERROR_MESSAGES.FILE_TOO_LARGE(
                            size=f"{MAX_FILE_SIZE_MB}MB"
                        ),
                    )

                data = transcribe(file_path)
            else:
                data = transcribe(file_path)

            return data

        elif app.state.config.STT_ENGINE == "openai":
            if is_mp4_audio(file_path):
                print("is_mp4_audio")
                os.rename(file_path, file_path.replace(".wav", ".mp4"))
                # Convert MP4 audio file to WAV format
                convert_mp4_to_wav(file_path.replace(".wav", ".mp4"), file_path)

            headers = {"Authorization": f"Bearer {app.state.config.STT_OPENAI_API_KEY}"}

            files = {"file": (filename, open(file_path, "rb"))}
            data = {"model": app.state.config.STT_MODEL}

            print(files, data)

            r = None
            try:
                r = requests.post(
                    url=f"{app.state.config.STT_OPENAI_API_BASE_URL}/audio/transcriptions",
                    headers=headers,
                    files=files,
                    data=data,
                )

                r.raise_for_status()

                data = r.json()

                # save the transcript to a json file
                transcript_file = f"{file_dir}/{id}.json"
                with open(transcript_file, "w") as f:
                    json.dump(data, f)

                print(data)
                return data
            except Exception as e:
                log.exception(e)
                error_detail = "Open WebUI: Server Connection Error"
                if r is not None:
                    try:
                        res = r.json()
                        if "error" in res:
                            error_detail = f"External: {res['error']['message']}"
                    except Exception:
                        error_detail = f"External: {e}"

                raise HTTPException(
                    status_code=r.status_code if r is not None else 500,
                    detail=error_detail,
                )
        except Exception as e:
            log.exception(e)
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.DEFAULT(e),
            )

    except Exception as e:
        log.exception(e)
@@ -449,7 +550,7 @@ def get_available_models() -> list[dict]:

    try:
        response = requests.get(
            "https://api.elevenlabs.io/v1/models", headers=headers
            "https://api.elevenlabs.io/v1/models", headers=headers, timeout=5
        )
        response.raise_for_status()
        models = response.json()
@@ -466,39 +567,73 @@ async def get_models(user=Depends(get_verified_user)):
    return {"models": get_available_models()}


def get_available_voices() -> list[dict]:
def get_available_voices() -> dict:
    """Returns {voice_id: voice_name} dict"""
    ret = {}
    if app.state.config.TTS_ENGINE == "openai":
        return [
            {"name": "alloy", "id": "alloy"},
            {"name": "echo", "id": "echo"},
            {"name": "fable", "id": "fable"},
            {"name": "onyx", "id": "onyx"},
            {"name": "nova", "id": "nova"},
            {"name": "shimmer", "id": "shimmer"},
        ]
    elif app.state.config.TTS_ENGINE == "elevenlabs":
        headers = {
            "xi-api-key": app.state.config.TTS_API_KEY,
            "Content-Type": "application/json",
        ret = {
            "alloy": "alloy",
            "echo": "echo",
            "fable": "fable",
            "onyx": "onyx",
            "nova": "nova",
            "shimmer": "shimmer",
        }

    elif app.state.config.TTS_ENGINE == "elevenlabs":
        try:
            response = requests.get(
                "https://api.elevenlabs.io/v1/voices", headers=headers
            )
            response.raise_for_status()
            voices_data = response.json()
            ret = get_elevenlabs_voices()
        except Exception:
            # Avoided @lru_cache with exception
            pass
    elif app.state.config.TTS_ENGINE == "azure":
        try:
            region = app.state.config.TTS_AZURE_SPEECH_REGION
            url = f"https://{region}.tts.speech.microsoft.com/cognitiveservices/voices/list"
            headers = {"Ocp-Apim-Subscription-Key": app.state.config.TTS_API_KEY}

            voices = []
            for voice in voices_data.get("voices", []):
                voices.append({"name": voice["name"], "id": voice["voice_id"]})
            return voices
            response = requests.get(url, headers=headers)
            response.raise_for_status()
            voices = response.json()
            for voice in voices:
                ret[voice["ShortName"]] = (
                    f"{voice['DisplayName']} ({voice['ShortName']})"
                )
        except requests.RequestException as e:
            log.error(f"Error fetching voices: {str(e)}")

    return []
    return ret


@lru_cache
def get_elevenlabs_voices() -> dict:
    """
    Note, set the following in your .env file to use Elevenlabs:
    AUDIO_TTS_ENGINE=elevenlabs
    AUDIO_TTS_API_KEY=sk_...  # Your Elevenlabs API key
    AUDIO_TTS_VOICE=EXAVITQu4vr4xnSDxMaL  # From https://api.elevenlabs.io/v1/voices
    AUDIO_TTS_MODEL=eleven_multilingual_v2
    """
    headers = {
        "xi-api-key": app.state.config.TTS_API_KEY,
        "Content-Type": "application/json",
    }
    try:
        # TODO: Add retries
        response = requests.get("https://api.elevenlabs.io/v1/voices", headers=headers)
        response.raise_for_status()
        voices_data = response.json()

        voices = {}
        for voice in voices_data.get("voices", []):
            voices[voice["voice_id"]] = voice["name"]
    except requests.RequestException as e:
        # Avoid @lru_cache with exception
        log.error(f"Error fetching voices: {str(e)}")
        raise RuntimeError(f"Error fetching voices: {str(e)}")

    return voices


@app.get("/voices")
async def get_voices(user=Depends(get_verified_user)):
    return {"voices": get_available_voices()}
    return {"voices": [{"id": k, "name": v} for k, v in get_available_voices().items()]}
|
||||
@@ -1,57 +1,45 @@
|
||||
import re
|
||||
import requests
|
||||
import base64
|
||||
from fastapi import (
|
||||
FastAPI,
|
||||
Request,
|
||||
Depends,
|
||||
HTTPException,
|
||||
status,
|
||||
UploadFile,
|
||||
File,
|
||||
Form,
|
||||
)
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from constants import ERROR_MESSAGES
|
||||
from utils.utils import (
|
||||
get_verified_user,
|
||||
get_admin_user,
|
||||
)
|
||||
|
||||
from apps.images.utils.comfyui import ImageGenerationPayload, comfyui_generate_image
|
||||
from utils.misc import calculate_sha256
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
from pathlib import Path
|
||||
import mimetypes
|
||||
import uuid
|
||||
import asyncio
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import mimetypes
|
||||
import re
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from config import (
|
||||
SRC_LOG_LEVELS,
|
||||
CACHE_DIR,
|
||||
IMAGE_GENERATION_ENGINE,
|
||||
ENABLE_IMAGE_GENERATION,
|
||||
AUTOMATIC1111_BASE_URL,
|
||||
import requests
|
||||
from open_webui.apps.images.utils.comfyui import (
|
||||
ComfyUIGenerateImageForm,
|
||||
ComfyUIWorkflow,
|
||||
comfyui_generate_image,
|
||||
)
|
||||
from open_webui.config import (
|
||||
AUTOMATIC1111_API_AUTH,
|
||||
AUTOMATIC1111_BASE_URL,
|
||||
AUTOMATIC1111_CFG_SCALE,
|
||||
AUTOMATIC1111_SAMPLER,
|
||||
AUTOMATIC1111_SCHEDULER,
|
||||
CACHE_DIR,
|
||||
COMFYUI_BASE_URL,
|
||||
COMFYUI_CFG_SCALE,
|
||||
COMFYUI_SAMPLER,
|
||||
COMFYUI_SCHEDULER,
|
||||
COMFYUI_SD3,
|
||||
COMFYUI_FLUX,
|
||||
COMFYUI_FLUX_WEIGHT_DTYPE,
|
||||
COMFYUI_FLUX_FP8_CLIP,
|
||||
IMAGES_OPENAI_API_BASE_URL,
|
||||
IMAGES_OPENAI_API_KEY,
|
||||
COMFYUI_WORKFLOW,
|
||||
COMFYUI_WORKFLOW_NODES,
|
||||
CORS_ALLOW_ORIGIN,
|
||||
ENABLE_IMAGE_GENERATION,
|
||||
IMAGE_GENERATION_ENGINE,
|
||||
IMAGE_GENERATION_MODEL,
|
||||
IMAGE_SIZE,
|
||||
IMAGE_STEPS,
|
||||
IMAGES_OPENAI_API_BASE_URL,
|
||||
IMAGES_OPENAI_API_KEY,
|
||||
AppConfig,
|
||||
)
|
||||
from open_webui.constants import ERROR_MESSAGES
|
||||
from open_webui.env import SRC_LOG_LEVELS
|
||||
from fastapi import Depends, FastAPI, HTTPException, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from pydantic import BaseModel
|
||||
from open_webui.utils.utils import get_admin_user, get_verified_user
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(SRC_LOG_LEVELS["IMAGES"])
|
||||
@@ -62,7 +50,7 @@ IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
app = FastAPI()
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
allow_origins=CORS_ALLOW_ORIGIN,
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
@@ -80,17 +68,123 @@ app.state.config.MODEL = IMAGE_GENERATION_MODEL
|
||||
|
||||
app.state.config.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
|
||||
app.state.config.AUTOMATIC1111_API_AUTH = AUTOMATIC1111_API_AUTH
app.state.config.AUTOMATIC1111_CFG_SCALE = AUTOMATIC1111_CFG_SCALE
app.state.config.AUTOMATIC1111_SAMPLER = AUTOMATIC1111_SAMPLER
app.state.config.AUTOMATIC1111_SCHEDULER = AUTOMATIC1111_SCHEDULER
app.state.config.COMFYUI_BASE_URL = COMFYUI_BASE_URL
app.state.config.COMFYUI_WORKFLOW = COMFYUI_WORKFLOW
app.state.config.COMFYUI_WORKFLOW_NODES = COMFYUI_WORKFLOW_NODES

app.state.config.IMAGE_SIZE = IMAGE_SIZE
app.state.config.IMAGE_STEPS = IMAGE_STEPS
app.state.config.COMFYUI_CFG_SCALE = COMFYUI_CFG_SCALE
app.state.config.COMFYUI_SAMPLER = COMFYUI_SAMPLER
app.state.config.COMFYUI_SCHEDULER = COMFYUI_SCHEDULER
app.state.config.COMFYUI_SD3 = COMFYUI_SD3
app.state.config.COMFYUI_FLUX = COMFYUI_FLUX
app.state.config.COMFYUI_FLUX_WEIGHT_DTYPE = COMFYUI_FLUX_WEIGHT_DTYPE
app.state.config.COMFYUI_FLUX_FP8_CLIP = COMFYUI_FLUX_FP8_CLIP


@app.get("/config")
async def get_config(request: Request, user=Depends(get_admin_user)):
    return {
        "enabled": app.state.config.ENABLED,
        "engine": app.state.config.ENGINE,
        "openai": {
            "OPENAI_API_BASE_URL": app.state.config.OPENAI_API_BASE_URL,
            "OPENAI_API_KEY": app.state.config.OPENAI_API_KEY,
        },
        "automatic1111": {
            "AUTOMATIC1111_BASE_URL": app.state.config.AUTOMATIC1111_BASE_URL,
            "AUTOMATIC1111_API_AUTH": app.state.config.AUTOMATIC1111_API_AUTH,
            "AUTOMATIC1111_CFG_SCALE": app.state.config.AUTOMATIC1111_CFG_SCALE,
            "AUTOMATIC1111_SAMPLER": app.state.config.AUTOMATIC1111_SAMPLER,
            "AUTOMATIC1111_SCHEDULER": app.state.config.AUTOMATIC1111_SCHEDULER,
        },
        "comfyui": {
            "COMFYUI_BASE_URL": app.state.config.COMFYUI_BASE_URL,
            "COMFYUI_WORKFLOW": app.state.config.COMFYUI_WORKFLOW,
            "COMFYUI_WORKFLOW_NODES": app.state.config.COMFYUI_WORKFLOW_NODES,
        },
    }


class OpenAIConfigForm(BaseModel):
    OPENAI_API_BASE_URL: str
    OPENAI_API_KEY: str


class Automatic1111ConfigForm(BaseModel):
    AUTOMATIC1111_BASE_URL: str
    AUTOMATIC1111_API_AUTH: str
    AUTOMATIC1111_CFG_SCALE: Optional[str]
    AUTOMATIC1111_SAMPLER: Optional[str]
    AUTOMATIC1111_SCHEDULER: Optional[str]


class ComfyUIConfigForm(BaseModel):
    COMFYUI_BASE_URL: str
    COMFYUI_WORKFLOW: str
    COMFYUI_WORKFLOW_NODES: list[dict]


class ConfigForm(BaseModel):
    enabled: bool
    engine: str
    openai: OpenAIConfigForm
    automatic1111: Automatic1111ConfigForm
    comfyui: ComfyUIConfigForm


@app.post("/config/update")
async def update_config(form_data: ConfigForm, user=Depends(get_admin_user)):
    app.state.config.ENGINE = form_data.engine
    app.state.config.ENABLED = form_data.enabled

    app.state.config.OPENAI_API_BASE_URL = form_data.openai.OPENAI_API_BASE_URL
    app.state.config.OPENAI_API_KEY = form_data.openai.OPENAI_API_KEY

    app.state.config.AUTOMATIC1111_BASE_URL = (
        form_data.automatic1111.AUTOMATIC1111_BASE_URL
    )
    app.state.config.AUTOMATIC1111_API_AUTH = (
        form_data.automatic1111.AUTOMATIC1111_API_AUTH
    )

    app.state.config.AUTOMATIC1111_CFG_SCALE = (
        float(form_data.automatic1111.AUTOMATIC1111_CFG_SCALE)
        if form_data.automatic1111.AUTOMATIC1111_CFG_SCALE
        else None
    )
    app.state.config.AUTOMATIC1111_SAMPLER = (
        form_data.automatic1111.AUTOMATIC1111_SAMPLER
        if form_data.automatic1111.AUTOMATIC1111_SAMPLER
        else None
    )
    app.state.config.AUTOMATIC1111_SCHEDULER = (
        form_data.automatic1111.AUTOMATIC1111_SCHEDULER
        if form_data.automatic1111.AUTOMATIC1111_SCHEDULER
        else None
    )

    app.state.config.COMFYUI_BASE_URL = form_data.comfyui.COMFYUI_BASE_URL.strip("/")
    app.state.config.COMFYUI_WORKFLOW = form_data.comfyui.COMFYUI_WORKFLOW
    app.state.config.COMFYUI_WORKFLOW_NODES = form_data.comfyui.COMFYUI_WORKFLOW_NODES

    return {
        "enabled": app.state.config.ENABLED,
        "engine": app.state.config.ENGINE,
        "openai": {
            "OPENAI_API_BASE_URL": app.state.config.OPENAI_API_BASE_URL,
            "OPENAI_API_KEY": app.state.config.OPENAI_API_KEY,
        },
        "automatic1111": {
            "AUTOMATIC1111_BASE_URL": app.state.config.AUTOMATIC1111_BASE_URL,
            "AUTOMATIC1111_API_AUTH": app.state.config.AUTOMATIC1111_API_AUTH,
            "AUTOMATIC1111_CFG_SCALE": app.state.config.AUTOMATIC1111_CFG_SCALE,
            "AUTOMATIC1111_SAMPLER": app.state.config.AUTOMATIC1111_SAMPLER,
            "AUTOMATIC1111_SCHEDULER": app.state.config.AUTOMATIC1111_SCHEDULER,
        },
        "comfyui": {
            "COMFYUI_BASE_URL": app.state.config.COMFYUI_BASE_URL,
            "COMFYUI_WORKFLOW": app.state.config.COMFYUI_WORKFLOW,
            "COMFYUI_WORKFLOW_NODES": app.state.config.COMFYUI_WORKFLOW_NODES,
        },
    }
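
Reviewer note: for orientation, a minimal sketch of a request an admin client might send to the consolidated update endpoint. The router mount point, token, and every value below are illustrative assumptions, not part of the diff.

import requests

payload = {
    "enabled": True,
    "engine": "comfyui",
    "openai": {
        "OPENAI_API_BASE_URL": "https://api.openai.com/v1",
        "OPENAI_API_KEY": "sk-...",  # placeholder key
    },
    "automatic1111": {
        "AUTOMATIC1111_BASE_URL": "http://localhost:7860",
        "AUTOMATIC1111_API_AUTH": "",
        "AUTOMATIC1111_CFG_SCALE": "7.0",  # optional; empty string is stored as None
        "AUTOMATIC1111_SAMPLER": "Euler a",
        "AUTOMATIC1111_SCHEDULER": "",
    },
    "comfyui": {
        "COMFYUI_BASE_URL": "http://localhost:8188",
        "COMFYUI_WORKFLOW": "{}",
        "COMFYUI_WORKFLOW_NODES": [],
    },
}
r = requests.post(
    "http://localhost:8080/images/config/update",  # assumed prefix for this router
    json=payload,
    headers={"Authorization": "Bearer <admin-token>"},  # placeholder
)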


def get_automatic1111_api_auth():
@@ -103,166 +197,112 @@ def get_automatic1111_api_auth():
    return f"Basic {auth1111_base64_encoded_string}"


@app.get("/config")
async def get_config(request: Request, user=Depends(get_admin_user)):
    return {
        "engine": app.state.config.ENGINE,
        "enabled": app.state.config.ENABLED,
    }


class ConfigUpdateForm(BaseModel):
    engine: str
    enabled: bool


@app.post("/config/update")
async def update_config(form_data: ConfigUpdateForm, user=Depends(get_admin_user)):
    app.state.config.ENGINE = form_data.engine
    app.state.config.ENABLED = form_data.enabled
    return {
        "engine": app.state.config.ENGINE,
        "enabled": app.state.config.ENABLED,
    }


class EngineUrlUpdateForm(BaseModel):
    AUTOMATIC1111_BASE_URL: Optional[str] = None
    AUTOMATIC1111_API_AUTH: Optional[str] = None
    COMFYUI_BASE_URL: Optional[str] = None


@app.get("/url")
async def get_engine_url(user=Depends(get_admin_user)):
    return {
        "AUTOMATIC1111_BASE_URL": app.state.config.AUTOMATIC1111_BASE_URL,
        "AUTOMATIC1111_API_AUTH": app.state.config.AUTOMATIC1111_API_AUTH,
        "COMFYUI_BASE_URL": app.state.config.COMFYUI_BASE_URL,
    }


@app.post("/url/update")
async def update_engine_url(
    form_data: EngineUrlUpdateForm, user=Depends(get_admin_user)
):
    if form_data.AUTOMATIC1111_BASE_URL is None:
        app.state.config.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
    else:
        url = form_data.AUTOMATIC1111_BASE_URL.strip("/")
@app.get("/config/url/verify")
async def verify_url(user=Depends(get_admin_user)):
    if app.state.config.ENGINE == "automatic1111":
        try:
            r = requests.head(url)
            r = requests.get(
                url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options",
                headers={"authorization": get_automatic1111_api_auth()},
            )
            r.raise_for_status()
            app.state.config.AUTOMATIC1111_BASE_URL = url
        except Exception as e:
            return True
        except Exception:
            app.state.config.ENABLED = False
            raise HTTPException(status_code=400, detail=ERROR_MESSAGES.INVALID_URL)

    if form_data.COMFYUI_BASE_URL is None:
        app.state.config.COMFYUI_BASE_URL = COMFYUI_BASE_URL
    else:
        url = form_data.COMFYUI_BASE_URL.strip("/")

    elif app.state.config.ENGINE == "comfyui":
        try:
            r = requests.head(url)
            r = requests.get(url=f"{app.state.config.COMFYUI_BASE_URL}/object_info")
            r.raise_for_status()
            app.state.config.COMFYUI_BASE_URL = url
        except Exception as e:
            return True
        except Exception:
            app.state.config.ENABLED = False
            raise HTTPException(status_code=400, detail=ERROR_MESSAGES.INVALID_URL)

    if form_data.AUTOMATIC1111_API_AUTH is None:
        app.state.config.AUTOMATIC1111_API_AUTH = AUTOMATIC1111_API_AUTH
    else:
        app.state.config.AUTOMATIC1111_API_AUTH = form_data.AUTOMATIC1111_API_AUTH
    return True


def set_image_model(model: str):
    log.info(f"Setting image model to {model}")
    app.state.config.MODEL = model
    if app.state.config.ENGINE in ["", "automatic1111"]:
        api_auth = get_automatic1111_api_auth()
        r = requests.get(
            url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options",
            headers={"authorization": api_auth},
        )
        options = r.json()
        if model != options["sd_model_checkpoint"]:
            options["sd_model_checkpoint"] = model
            r = requests.post(
                url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options",
                json=options,
                headers={"authorization": api_auth},
            )
    return app.state.config.MODEL


def get_image_model():
    if app.state.config.ENGINE == "openai":
        return app.state.config.MODEL if app.state.config.MODEL else "dall-e-2"
    elif app.state.config.ENGINE == "comfyui":
        return app.state.config.MODEL if app.state.config.MODEL else ""
    elif app.state.config.ENGINE == "automatic1111" or app.state.config.ENGINE == "":
        try:
            r = requests.get(
                url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options",
                headers={"authorization": get_automatic1111_api_auth()},
            )
            options = r.json()
            return options["sd_model_checkpoint"]
        except Exception as e:
            app.state.config.ENABLED = False
            raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))


class ImageConfigForm(BaseModel):
    MODEL: str
    IMAGE_SIZE: str
    IMAGE_STEPS: int


@app.get("/image/config")
async def get_image_config(user=Depends(get_admin_user)):
    return {
        "AUTOMATIC1111_BASE_URL": app.state.config.AUTOMATIC1111_BASE_URL,
        "AUTOMATIC1111_API_AUTH": app.state.config.AUTOMATIC1111_API_AUTH,
        "COMFYUI_BASE_URL": app.state.config.COMFYUI_BASE_URL,
        "status": True,
        "MODEL": app.state.config.MODEL,
        "IMAGE_SIZE": app.state.config.IMAGE_SIZE,
        "IMAGE_STEPS": app.state.config.IMAGE_STEPS,
    }


class OpenAIConfigUpdateForm(BaseModel):
    url: str
    key: str
@app.post("/image/config/update")
async def update_image_config(form_data: ImageConfigForm, user=Depends(get_admin_user)):

    set_image_model(form_data.MODEL)

@app.get("/openai/config")
async def get_openai_config(user=Depends(get_admin_user)):
    return {
        "OPENAI_API_BASE_URL": app.state.config.OPENAI_API_BASE_URL,
        "OPENAI_API_KEY": app.state.config.OPENAI_API_KEY,
    }


@app.post("/openai/config/update")
async def update_openai_config(
    form_data: OpenAIConfigUpdateForm, user=Depends(get_admin_user)
):
    if form_data.key == "":
        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)

    app.state.config.OPENAI_API_BASE_URL = form_data.url
    app.state.config.OPENAI_API_KEY = form_data.key

    return {
        "status": True,
        "OPENAI_API_BASE_URL": app.state.config.OPENAI_API_BASE_URL,
        "OPENAI_API_KEY": app.state.config.OPENAI_API_KEY,
    }


class ImageSizeUpdateForm(BaseModel):
    size: str


@app.get("/size")
async def get_image_size(user=Depends(get_admin_user)):
    return {"IMAGE_SIZE": app.state.config.IMAGE_SIZE}


@app.post("/size/update")
async def update_image_size(
    form_data: ImageSizeUpdateForm, user=Depends(get_admin_user)
):
    pattern = r"^\d+x\d+$"  # Regular expression pattern
    if re.match(pattern, form_data.size):
        app.state.config.IMAGE_SIZE = form_data.size
        return {
            "IMAGE_SIZE": app.state.config.IMAGE_SIZE,
            "status": True,
        }
    pattern = r"^\d+x\d+$"
    if re.match(pattern, form_data.IMAGE_SIZE):
        app.state.config.IMAGE_SIZE = form_data.IMAGE_SIZE
    else:
        raise HTTPException(
            status_code=400,
            detail=ERROR_MESSAGES.INCORRECT_FORMAT(" (e.g., 512x512)."),
        )


class ImageStepsUpdateForm(BaseModel):
    steps: int


@app.get("/steps")
async def get_image_size(user=Depends(get_admin_user)):
    return {"IMAGE_STEPS": app.state.config.IMAGE_STEPS}


@app.post("/steps/update")
async def update_image_size(
    form_data: ImageStepsUpdateForm, user=Depends(get_admin_user)
):
    if form_data.steps >= 0:
        app.state.config.IMAGE_STEPS = form_data.steps
        return {
            "IMAGE_STEPS": app.state.config.IMAGE_STEPS,
            "status": True,
        }
    if form_data.IMAGE_STEPS >= 0:
        app.state.config.IMAGE_STEPS = form_data.IMAGE_STEPS
    else:
        raise HTTPException(
            status_code=400,
            detail=ERROR_MESSAGES.INCORRECT_FORMAT(" (e.g., 50)."),
        )

    return {
        "MODEL": app.state.config.MODEL,
        "IMAGE_SIZE": app.state.config.IMAGE_SIZE,
        "IMAGE_STEPS": app.state.config.IMAGE_STEPS,
    }


@app.get("/models")
def get_models(user=Depends(get_verified_user)):
@@ -273,18 +313,51 @@ def get_models(user=Depends(get_verified_user)):
            {"id": "dall-e-3", "name": "DALL·E 3"},
        ]
    elif app.state.config.ENGINE == "comfyui":

        # TODO - get models from comfyui
        r = requests.get(url=f"{app.state.config.COMFYUI_BASE_URL}/object_info")
        info = r.json()

        return list(
            map(
                lambda model: {"id": model, "name": model},
                info["CheckpointLoaderSimple"]["input"]["required"]["ckpt_name"][0],
            )
        )
        workflow = json.loads(app.state.config.COMFYUI_WORKFLOW)
        model_node_id = None

        else:
        for node in app.state.config.COMFYUI_WORKFLOW_NODES:
            if node["type"] == "model":
                if node["node_ids"]:
                    model_node_id = node["node_ids"][0]
                break

        if model_node_id:
            model_list_key = None

            print(workflow[model_node_id]["class_type"])
            for key in info[workflow[model_node_id]["class_type"]]["input"][
                "required"
            ]:
                if "_name" in key:
                    model_list_key = key
                    break

            if model_list_key:
                return list(
                    map(
                        lambda model: {"id": model, "name": model},
                        info[workflow[model_node_id]["class_type"]]["input"][
                            "required"
                        ][model_list_key][0],
                    )
                )
        else:
            return list(
                map(
                    lambda model: {"id": model, "name": model},
                    info["CheckpointLoaderSimple"]["input"]["required"][
                        "ckpt_name"
                    ][0],
                )
            )
    elif (
        app.state.config.ENGINE == "automatic1111" or app.state.config.ENGINE == ""
    ):
        r = requests.get(
            url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/sd-models",
            headers={"authorization": get_automatic1111_api_auth()},
@@ -301,69 +374,11 @@ def get_models(user=Depends(get_verified_user)):
        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))


@app.get("/models/default")
async def get_default_model(user=Depends(get_admin_user)):
    try:
        if app.state.config.ENGINE == "openai":
            return {
                "model": (
                    app.state.config.MODEL if app.state.config.MODEL else "dall-e-2"
                )
            }
        elif app.state.config.ENGINE == "comfyui":
            return {"model": (app.state.config.MODEL if app.state.config.MODEL else "")}
        else:
            r = requests.get(
                url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options",
                headers={"authorization": get_automatic1111_api_auth()},
            )
            options = r.json()
            return {"model": options["sd_model_checkpoint"]}
    except Exception as e:
        app.state.config.ENABLED = False
        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))


class UpdateModelForm(BaseModel):
    model: str


def set_model_handler(model: str):
    if app.state.config.ENGINE in ["openai", "comfyui"]:
        app.state.config.MODEL = model
        return app.state.config.MODEL
    else:
        api_auth = get_automatic1111_api_auth()
        r = requests.get(
            url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options",
            headers={"authorization": api_auth},
        )
        options = r.json()

        if model != options["sd_model_checkpoint"]:
            options["sd_model_checkpoint"] = model
            r = requests.post(
                url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/options",
                json=options,
                headers={"authorization": api_auth},
            )

        return options


@app.post("/models/default/update")
def update_default_model(
    form_data: UpdateModelForm,
    user=Depends(get_verified_user),
):
    return set_model_handler(form_data.model)


class GenerateImageForm(BaseModel):
    model: Optional[str] = None
    prompt: str
    n: int = 1
    size: Optional[str] = None
    n: int = 1
    negative_prompt: Optional[str] = None


@@ -405,7 +420,6 @@ def save_url_image(url):
    r = requests.get(url)
    r.raise_for_status()
    if r.headers["content-type"].split("/")[0] == "image":

        mime_type = r.headers["content-type"]
        image_format = mimetypes.guess_extension(mime_type)

@@ -420,7 +434,7 @@ def save_url_image(url):
                image_file.write(chunk)
        return image_filename
    else:
        log.error(f"Url does not point to an image.")
        log.error("Url does not point to an image.")
        return None

    except Exception as e:
@@ -438,7 +452,6 @@ async def image_generations(
    r = None
    try:
        if app.state.config.ENGINE == "openai":

            headers = {}
            headers["Authorization"] = f"Bearer {app.state.config.OPENAI_API_KEY}"
            headers["Content-Type"] = "application/json"
@@ -457,7 +470,9 @@ async def image_generations(
                "response_format": "b64_json",
            }

            r = requests.post(
            # Use asyncio.to_thread for the requests.post call
            r = await asyncio.to_thread(
                requests.post,
                url=f"{app.state.config.OPENAI_API_BASE_URL}/images/generations",
                json=data,
                headers=headers,
@@ -479,7 +494,6 @@ async def image_generations(
            return images

        elif app.state.config.ENGINE == "comfyui":

            data = {
                "prompt": form_data.prompt,
                "width": width,
@@ -493,32 +507,20 @@ async def image_generations(
            if form_data.negative_prompt is not None:
                data["negative_prompt"] = form_data.negative_prompt

            if app.state.config.COMFYUI_CFG_SCALE:
                data["cfg_scale"] = app.state.config.COMFYUI_CFG_SCALE

            if app.state.config.COMFYUI_SAMPLER is not None:
                data["sampler"] = app.state.config.COMFYUI_SAMPLER

            if app.state.config.COMFYUI_SCHEDULER is not None:
                data["scheduler"] = app.state.config.COMFYUI_SCHEDULER

            if app.state.config.COMFYUI_SD3 is not None:
                data["sd3"] = app.state.config.COMFYUI_SD3

            if app.state.config.COMFYUI_FLUX is not None:
                data["flux"] = app.state.config.COMFYUI_FLUX

            if app.state.config.COMFYUI_FLUX_WEIGHT_DTYPE is not None:
                data["flux_weight_dtype"] = app.state.config.COMFYUI_FLUX_WEIGHT_DTYPE

            if app.state.config.COMFYUI_FLUX_FP8_CLIP is not None:
                data["flux_fp8_clip"] = app.state.config.COMFYUI_FLUX_FP8_CLIP

            data = ImageGenerationPayload(**data)

            form_data = ComfyUIGenerateImageForm(
                **{
                    "workflow": ComfyUIWorkflow(
                        **{
                            "workflow": app.state.config.COMFYUI_WORKFLOW,
                            "nodes": app.state.config.COMFYUI_WORKFLOW_NODES,
                        }
                    ),
                    **data,
                }
            )
            res = await comfyui_generate_image(
                app.state.config.MODEL,
                data,
                form_data,
                user.id,
                app.state.config.COMFYUI_BASE_URL,
            )
@@ -532,13 +534,15 @@ async def image_generations(
                file_body_path = IMAGE_CACHE_DIR.joinpath(f"{image_filename}.json")

                with open(file_body_path, "w") as f:
                    json.dump(data.model_dump(exclude_none=True), f)
                    json.dump(form_data.model_dump(exclude_none=True), f)

            log.debug(f"images: {images}")
            return images
        else:
        elif (
            app.state.config.ENGINE == "automatic1111" or app.state.config.ENGINE == ""
        ):
            if form_data.model:
                set_model_handler(form_data.model)
                set_image_model(form_data.model)

            data = {
                "prompt": form_data.prompt,
@@ -553,14 +557,24 @@ async def image_generations(
            if form_data.negative_prompt is not None:
                data["negative_prompt"] = form_data.negative_prompt

            r = requests.post(
            if app.state.config.AUTOMATIC1111_CFG_SCALE:
                data["cfg_scale"] = app.state.config.AUTOMATIC1111_CFG_SCALE

            if app.state.config.AUTOMATIC1111_SAMPLER:
                data["sampler_name"] = app.state.config.AUTOMATIC1111_SAMPLER

            if app.state.config.AUTOMATIC1111_SCHEDULER:
                data["scheduler"] = app.state.config.AUTOMATIC1111_SCHEDULER

            # Use asyncio.to_thread for the requests.post call
            r = await asyncio.to_thread(
                requests.post,
                url=f"{app.state.config.AUTOMATIC1111_BASE_URL}/sdapi/v1/txt2img",
                json=data,
                headers={"authorization": get_automatic1111_api_auth()},
            )

            res = r.json()

            log.debug(f"res: {res}")

            images = []
@@ -574,10 +588,8 @@ async def image_generations(
                json.dump({**data, "info": res["info"]}, f)

            return images

    except Exception as e:
        error = e

        if r != None:
            data = r.json()
            if "error" in data:
backend/open_webui/apps/images/utils/comfyui.py (new file)
@@ -0,0 +1,186 @@
import asyncio
import json
import logging
import random
import urllib.parse
import urllib.request
from typing import Optional

import websocket  # NOTE: websocket-client (https://github.com/websocket-client/websocket-client)
from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["COMFYUI"])

default_headers = {"User-Agent": "Mozilla/5.0"}


def queue_prompt(prompt, client_id, base_url):
    log.info("queue_prompt")
    p = {"prompt": prompt, "client_id": client_id}
    data = json.dumps(p).encode("utf-8")
    log.debug(f"queue_prompt data: {data}")
    try:
        req = urllib.request.Request(
            f"{base_url}/prompt", data=data, headers=default_headers
        )
        response = urllib.request.urlopen(req).read()
        return json.loads(response)
    except Exception as e:
        log.exception(f"Error while queuing prompt: {e}")
        raise e


def get_image(filename, subfolder, folder_type, base_url):
    log.info("get_image")
    data = {"filename": filename, "subfolder": subfolder, "type": folder_type}
    url_values = urllib.parse.urlencode(data)
    req = urllib.request.Request(
        f"{base_url}/view?{url_values}", headers=default_headers
    )
    with urllib.request.urlopen(req) as response:
        return response.read()


def get_image_url(filename, subfolder, folder_type, base_url):
    log.info("get_image")
    data = {"filename": filename, "subfolder": subfolder, "type": folder_type}
    url_values = urllib.parse.urlencode(data)
    return f"{base_url}/view?{url_values}"


def get_history(prompt_id, base_url):
    log.info("get_history")

    req = urllib.request.Request(
        f"{base_url}/history/{prompt_id}", headers=default_headers
    )
    with urllib.request.urlopen(req) as response:
        return json.loads(response.read())


def get_images(ws, prompt, client_id, base_url):
    prompt_id = queue_prompt(prompt, client_id, base_url)["prompt_id"]
    output_images = []
    while True:
        out = ws.recv()
        if isinstance(out, str):
            message = json.loads(out)
            if message["type"] == "executing":
                data = message["data"]
                if data["node"] is None and data["prompt_id"] == prompt_id:
                    break  # Execution is done
        else:
            continue  # previews are binary data

    history = get_history(prompt_id, base_url)[prompt_id]
    for o in history["outputs"]:
        for node_id in history["outputs"]:
            node_output = history["outputs"][node_id]
            if "images" in node_output:
                for image in node_output["images"]:
                    url = get_image_url(
                        image["filename"], image["subfolder"], image["type"], base_url
                    )
                    output_images.append({"url": url})
    return {"data": output_images}


class ComfyUINodeInput(BaseModel):
    type: Optional[str] = None
    node_ids: list[str] = []
    key: Optional[str] = "text"
    value: Optional[str] = None


class ComfyUIWorkflow(BaseModel):
    workflow: str
    nodes: list[ComfyUINodeInput]


class ComfyUIGenerateImageForm(BaseModel):
    workflow: ComfyUIWorkflow

    prompt: str
    negative_prompt: Optional[str] = None
    width: int
    height: int
    n: int = 1

    steps: Optional[int] = None
    seed: Optional[int] = None
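
Reviewer note: a minimal sketch of how these models compose; the one-node workflow string, node id, and prompt below are illustrative assumptions, not values from the diff.

form = ComfyUIGenerateImageForm(
    workflow=ComfyUIWorkflow(
        # A workflow JSON string, stored the same way COMFYUI_WORKFLOW is.
        workflow='{"3": {"class_type": "KSampler", "inputs": {"seed": 0}}}',
        nodes=[ComfyUINodeInput(type="seed", node_ids=["3"], key="seed")],
    ),
    prompt="a watercolor lighthouse at dusk",  # placeholder prompt
    width=512,
    height=512,
    n=1,
)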


async def comfyui_generate_image(
    model: str, payload: ComfyUIGenerateImageForm, client_id, base_url
):
    ws_url = base_url.replace("http://", "ws://").replace("https://", "wss://")
    workflow = json.loads(payload.workflow.workflow)

    for node in payload.workflow.nodes:
        if node.type:
            if node.type == "model":
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][node.key] = model
            elif node.type == "prompt":
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][
                        node.key if node.key else "text"
                    ] = payload.prompt
            elif node.type == "negative_prompt":
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][
                        node.key if node.key else "text"
                    ] = payload.negative_prompt
            elif node.type == "width":
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][
                        node.key if node.key else "width"
                    ] = payload.width
            elif node.type == "height":
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][
                        node.key if node.key else "height"
                    ] = payload.height
            elif node.type == "n":
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][
                        node.key if node.key else "batch_size"
                    ] = payload.n
            elif node.type == "steps":
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][
                        node.key if node.key else "steps"
                    ] = payload.steps
            elif node.type == "seed":
                seed = (
                    payload.seed
                    if payload.seed
                    else random.randint(0, 18446744073709551614)
                )
                for node_id in node.node_ids:
                    workflow[node_id]["inputs"][node.key] = seed
        else:
            for node_id in node.node_ids:
                workflow[node_id]["inputs"][node.key] = node.value

    try:
        ws = websocket.WebSocket()
        ws.connect(f"{ws_url}/ws?clientId={client_id}")
        log.info("WebSocket connection established.")
    except Exception as e:
        log.exception(f"Failed to connect to WebSocket server: {e}")
        return None

    try:
        log.info("Sending workflow to WebSocket server.")
        log.info(f"Workflow: {workflow}")
        images = await asyncio.to_thread(get_images, ws, workflow, client_id, base_url)
    except Exception as e:
        log.exception(f"Error while receiving images: {e}")
        images = None

    ws.close()

    return images
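
Reviewer note: a hedged usage sketch for the function above; the checkpoint name, client id, and base URL are placeholders (open-webui passes user.id as the client id).

import asyncio

images = asyncio.run(
    comfyui_generate_image(
        "sd_xl_base_1.0.safetensors",  # placeholder checkpoint name
        form,                          # a ComfyUIGenerateImageForm as sketched above
        "user-123",                    # placeholder client id
        "http://localhost:8188",       # placeholder ComfyUI base URL
    )
)
# On success: {"data": [{"url": "..."}, ...]}; None if the websocket never connected.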
@@ -1,53 +1,46 @@
from fastapi import (
    FastAPI,
    Request,
    HTTPException,
    Depends,
    UploadFile,
    File,
)
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse

from pydantic import BaseModel, ConfigDict

import os
import re
import random
import requests
import json
import aiohttp
import asyncio
import json
import logging
import os
import random
import re
import time
from urllib.parse import urlparse
from typing import Optional, Union
from urllib.parse import urlparse

from starlette.background import BackgroundTask

from apps.webui.models.models import Models
from constants import ERROR_MESSAGES
from utils.utils import (
    get_verified_user,
    get_admin_user,
)

from config import (
    SRC_LOG_LEVELS,
    OLLAMA_BASE_URLS,
    ENABLE_OLLAMA_API,
    AIOHTTP_CLIENT_TIMEOUT,
import aiohttp
import requests
from open_webui.apps.webui.models.models import Models
from open_webui.config import (
    CORS_ALLOW_ORIGIN,
    ENABLE_MODEL_FILTER,
    ENABLE_OLLAMA_API,
    MODEL_FILTER_LIST,
    OLLAMA_BASE_URLS,
    UPLOAD_DIR,
    AppConfig,
)
from utils.misc import (
from open_webui.env import AIOHTTP_CLIENT_TIMEOUT


from open_webui.constants import ERROR_MESSAGES
from open_webui.env import SRC_LOG_LEVELS
from fastapi import Depends, FastAPI, File, HTTPException, Request, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from pydantic import BaseModel, ConfigDict
from starlette.background import BackgroundTask


from open_webui.utils.misc import (
    calculate_sha256,
)
from open_webui.utils.payload import (
    apply_model_params_to_body_ollama,
    apply_model_params_to_body_openai,
    apply_model_system_prompt_to_body,
)
from open_webui.utils.utils import get_admin_user, get_verified_user

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["OLLAMA"])
@@ -55,7 +48,7 @@ log.setLevel(SRC_LOG_LEVELS["OLLAMA"])
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_origins=CORS_ALLOW_ORIGIN,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
@@ -126,7 +119,7 @@ async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin


async def fetch_url(url):
    timeout = aiohttp.ClientTimeout(total=5)
    timeout = aiohttp.ClientTimeout(total=3)
    try:
        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
            async with session.get(url) as response:
@@ -147,20 +140,29 @@ async def cleanup_response(
    await session.close()


async def post_streaming_url(url: str, payload: str, stream: bool = True):
async def post_streaming_url(
    url: str, payload: Union[str, bytes], stream: bool = True, content_type=None
):
    r = None
    try:
        session = aiohttp.ClientSession(
            trust_env=True, timeout=aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT)
        )
        r = await session.post(url, data=payload)
        r = await session.post(
            url,
            data=payload,
            headers={"Content-Type": "application/json"},
        )
        r.raise_for_status()

        if stream:
            headers = dict(r.headers)
            if content_type:
                headers["Content-Type"] = content_type
            return StreamingResponse(
                r.content,
                status_code=r.status,
                headers=dict(r.headers),
                headers=headers,
                background=BackgroundTask(
                    cleanup_response, response=r, session=session
                ),
@@ -422,6 +424,7 @@ async def copy_model(
    r = requests.request(
        method="POST",
        url=f"{url}/api/copy",
        headers={"Content-Type": "application/json"},
        data=form_data.model_dump_json(exclude_none=True).encode(),
    )

@@ -470,6 +473,7 @@ async def delete_model(
    r = requests.request(
        method="DELETE",
        url=f"{url}/api/delete",
        headers={"Content-Type": "application/json"},
        data=form_data.model_dump_json(exclude_none=True).encode(),
    )
    try:
@@ -510,6 +514,7 @@ async def show_model_info(form_data: ModelNameForm, user=Depends(get_verified_us
    r = requests.request(
        method="POST",
        url=f"{url}/api/show",
        headers={"Content-Type": "application/json"},
        data=form_data.model_dump_json(exclude_none=True).encode(),
    )
    try:
@@ -540,6 +545,24 @@ class GenerateEmbeddingsForm(BaseModel):
    keep_alive: Optional[Union[int, str]] = None


class GenerateEmbedForm(BaseModel):
    model: str
    input: list[str] | str
    truncate: Optional[bool] = None
    options: Optional[dict] = None
    keep_alive: Optional[Union[int, str]] = None


@app.post("/api/embed")
@app.post("/api/embed/{url_idx}")
async def generate_embeddings(
    form_data: GenerateEmbedForm,
    url_idx: Optional[int] = None,
    user=Depends(get_verified_user),
):
    return generate_ollama_batch_embeddings(form_data, url_idx)
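
Reviewer note: the new route mirrors Ollama's batch embed API. A sketch of a body GenerateEmbedForm accepts; the model name and passages are illustrative, not from the diff.

body = {
    "model": "nomic-embed-text:latest",  # placeholder model name
    "input": ["first passage to embed", "second passage to embed"],  # str also allowed
    "truncate": True,
}
# Ollama replies with roughly {"embeddings": [[...], [...]], ...},
# which generate_ollama_batch_embeddings returns as-is.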


@app.post("/api/embeddings")
@app.post("/api/embeddings/{url_idx}")
async def generate_embeddings(
@@ -547,47 +570,7 @@ async def generate_embeddings(
    url_idx: Optional[int] = None,
    user=Depends(get_verified_user),
):
    if url_idx is None:
        model = form_data.model

        if ":" not in model:
            model = f"{model}:latest"

        if model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.config.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = requests.request(
        method="POST",
        url=f"{url}/api/embeddings",
        data=form_data.model_dump_json(exclude_none=True).encode(),
    )
    try:
        r.raise_for_status()

        return r.json()
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
    return generate_ollama_embeddings(form_data=form_data, url_idx=url_idx)


def generate_ollama_embeddings(
@@ -616,6 +599,7 @@ def generate_ollama_embeddings(
    r = requests.request(
        method="POST",
        url=f"{url}/api/embeddings",
        headers={"Content-Type": "application/json"},
        data=form_data.model_dump_json(exclude_none=True).encode(),
    )
    try:
@@ -626,7 +610,64 @@ def generate_ollama_embeddings(
        log.info(f"generate_ollama_embeddings {data}")

        if "embedding" in data:
            return data["embedding"]
            return data
        else:
            raise Exception("Something went wrong :/")
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


def generate_ollama_batch_embeddings(
    form_data: GenerateEmbedForm,
    url_idx: Optional[int] = None,
):
    log.info(f"generate_ollama_batch_embeddings {form_data}")

    if url_idx is None:
        model = form_data.model

        if ":" not in model:
            model = f"{model}:latest"

        if model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.config.OLLAMA_BASE_URLS[url_idx]
    log.info(f"url: {url}")

    r = requests.request(
        method="POST",
        url=f"{url}/api/embed",
        headers={"Content-Type": "application/json"},
        data=form_data.model_dump_json(exclude_none=True).encode(),
    )
    try:
        r.raise_for_status()

        data = r.json()

        log.info(f"generate_ollama_batch_embeddings {data}")

        if "embeddings" in data:
            return data
        else:
            raise Exception("Something went wrong :/")
    except Exception as e:
@@ -721,15 +762,20 @@ async def generate_chat_completion(
    url_idx: Optional[int] = None,
    user=Depends(get_verified_user),
):
    log.debug(f"{form_data.model_dump_json(exclude_none=True).encode()}=")

    payload = {
        **form_data.model_dump(exclude_none=True, exclude=["metadata"]),
    }
    payload = {**form_data.model_dump(exclude_none=True)}
    log.debug(f"generate_chat_completion() - 1.payload = {payload}")
    if "metadata" in payload:
        del payload["metadata"]

    model_id = form_data.model

    if app.state.config.ENABLE_MODEL_FILTER:
        if user.role == "user" and model_id not in app.state.config.MODEL_FILTER_LIST:
            raise HTTPException(
                status_code=403,
                detail="Model not found",
            )

    model_info = Models.get_model_by_id(model_id)

    if model_info:
@@ -752,9 +798,14 @@ async def generate_chat_completion(

    url = get_ollama_url(url_idx, payload["model"])
    log.info(f"url: {url}")
    log.debug(payload)
    log.debug(f"generate_chat_completion() - 2.payload = {payload}")

    return await post_streaming_url(f"{url}/api/chat", json.dumps(payload))
    return await post_streaming_url(
        f"{url}/api/chat",
        json.dumps(payload),
        stream=form_data.stream,
        content_type="application/x-ndjson",
    )


# TODO: we should update this part once Ollama supports other types
@@ -790,6 +841,14 @@ async def generate_openai_chat_completion(
    del payload["metadata"]

    model_id = completion_form.model

    if app.state.config.ENABLE_MODEL_FILTER:
        if user.role == "user" and model_id not in app.state.config.MODEL_FILTER_LIST:
            raise HTTPException(
                status_code=403,
                detail="Model not found",
            )

    model_info = Models.get_model_by_id(model_id)

    if model_info:
@@ -1,43 +1,42 @@
from fastapi import FastAPI, Request, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse, FileResponse

import requests
import aiohttp
import asyncio
import hashlib
import json
import logging
from pathlib import Path
from typing import Literal, Optional, overload

import aiohttp
import requests
from open_webui.apps.webui.models.models import Models
from open_webui.config import (
    CACHE_DIR,
    CORS_ALLOW_ORIGIN,
    ENABLE_MODEL_FILTER,
    ENABLE_OPENAI_API,
    MODEL_FILTER_LIST,
    OPENAI_API_BASE_URLS,
    OPENAI_API_KEYS,
    AppConfig,
)
from open_webui.env import (
    AIOHTTP_CLIENT_TIMEOUT,
    AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST,
)

from open_webui.constants import ERROR_MESSAGES
from open_webui.env import SRC_LOG_LEVELS
from fastapi import Depends, FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, StreamingResponse
from pydantic import BaseModel
from starlette.background import BackgroundTask

from apps.webui.models.models import Models
from constants import ERROR_MESSAGES
from utils.utils import (
    get_verified_user,
    get_admin_user,
)
from utils.misc import (
from open_webui.utils.payload import (
    apply_model_params_to_body_openai,
    apply_model_system_prompt_to_body,
)

from config import (
    SRC_LOG_LEVELS,
    ENABLE_OPENAI_API,
    AIOHTTP_CLIENT_TIMEOUT,
    OPENAI_API_BASE_URLS,
    OPENAI_API_KEYS,
    CACHE_DIR,
    ENABLE_MODEL_FILTER,
    MODEL_FILTER_LIST,
    AppConfig,
)
from typing import Optional, Literal, overload


import hashlib
from pathlib import Path
from open_webui.utils.utils import get_admin_user, get_verified_user

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["OPENAI"])
@@ -45,13 +44,12 @@ log.setLevel(SRC_LOG_LEVELS["OPENAI"])
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_origins=CORS_ALLOW_ORIGIN,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


app.state.config = AppConfig()

app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
@@ -184,7 +182,7 @@ async def speech(request: Request, user=Depends(get_verified_user)):


async def fetch_url(url, key):
    timeout = aiohttp.ClientTimeout(total=5)
    timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST)
    try:
        headers = {"Authorization": f"Bearer {key}"}
        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
@@ -224,7 +222,17 @@ def merge_models_lists(model_lists):
                for model in models
                if "api.openai.com"
                not in app.state.config.OPENAI_API_BASE_URLS[idx]
                or "gpt" in model["id"]
                or not any(
                    name in model["id"]
                    for name in [
                        "babbage",
                        "dall-e",
                        "davinci",
                        "embedding",
                        "tts",
                        "whisper",
                    ]
                )
            ]
        )

@@ -232,9 +240,7 @@ def merge_models_lists(model_lists):


def is_openai_api_disabled():
    api_keys = app.state.config.OPENAI_API_KEYS
    no_keys = len(api_keys) == 1 and api_keys[0] == ""
    return no_keys or not app.state.config.ENABLE_OPENAI_API
    return not app.state.config.ENABLE_OPENAI_API


async def get_all_models_raw() -> list:
@@ -327,10 +333,24 @@ async def get_models(url_idx: Optional[int] = None, user=Depends(get_verified_us
        r.raise_for_status()

        response_data = r.json()

        if "api.openai.com" in url:
            response_data["data"] = list(
                filter(lambda model: "gpt" in model["id"], response_data["data"])
            )
            # Filter the response data
            response_data["data"] = [
                model
                for model in response_data["data"]
                if not any(
                    name in model["id"]
                    for name in [
                        "babbage",
                        "dall-e",
                        "davinci",
                        "embedding",
                        "tts",
                        "whisper",
                    ]
                )
            ]

        return response_data
    except Exception as e:
@@ -385,14 +405,32 @@ async def generate_chat_completion(
        "role": user.role,
    }

    url = app.state.config.OPENAI_API_BASE_URLS[idx]
    key = app.state.config.OPENAI_API_KEYS[idx]
    is_o1 = payload["model"].lower().startswith("o1-")

    # Change max_completion_tokens to max_tokens (Backward compatible)
    if "api.openai.com" not in url and not is_o1:
        if "max_completion_tokens" in payload:
            # Remove "max_completion_tokens" from the payload
            payload["max_tokens"] = payload["max_completion_tokens"]
            del payload["max_completion_tokens"]
    else:
        if is_o1 and "max_tokens" in payload:
            payload["max_completion_tokens"] = payload["max_tokens"]
            del payload["max_tokens"]
        if "max_tokens" in payload and "max_completion_tokens" in payload:
            del payload["max_tokens"]

    # Fix: O1 does not support the "system" parameter, Modify "system" to "user"
    if is_o1 and payload["messages"][0]["role"] == "system":
        payload["messages"][0]["role"] = "user"

    # Convert the modified body back to JSON
    payload = json.dumps(payload)

    log.debug(payload)

    url = app.state.config.OPENAI_API_BASE_URLS[idx]
    key = app.state.config.OPENAI_API_KEYS[idx]

    headers = {}
    headers["Authorization"] = f"Bearer {key}"
    headers["Content-Type"] = "application/json"
@@ -403,6 +441,7 @@ async def generate_chat_completion(
    r = None
    session = None
    streaming = False
    response = None

    try:
        session = aiohttp.ClientSession(
@@ -415,8 +454,6 @@ async def generate_chat_completion(
            headers=headers,
        )

        r.raise_for_status()

        # Check if response is SSE
        if "text/event-stream" in r.headers.get("Content-Type", ""):
            streaming = True
@@ -429,19 +466,23 @@ async def generate_chat_completion(
                ),
            )
        else:
            response_data = await r.json()
            return response_data
            try:
                response = await r.json()
            except Exception as e:
                log.error(e)
                response = await r.text()

            r.raise_for_status()
            return response
    except Exception as e:
        log.exception(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = await r.json()
                print(res)
                if "error" in res:
                    error_detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}"
            except Exception:
                error_detail = f"External: {e}"
        if isinstance(response, dict):
            if "error" in response:
                error_detail = f"{response['error']['message'] if 'message' in response['error'] else response['error']}"
        elif isinstance(response, str):
            error_detail = response

        raise HTTPException(status_code=r.status if r else 500, detail=error_detail)
    finally:
        if not streaming and session:
backend/open_webui/apps/retrieval/loaders/main.py (new file)
@@ -0,0 +1,190 @@
import requests
import logging
import ftfy

from langchain_community.document_loaders import (
    BSHTMLLoader,
    CSVLoader,
    Docx2txtLoader,
    OutlookMessageLoader,
    PyPDFLoader,
    TextLoader,
    UnstructuredEPubLoader,
    UnstructuredExcelLoader,
    UnstructuredMarkdownLoader,
    UnstructuredPowerPointLoader,
    UnstructuredRSTLoader,
    UnstructuredXMLLoader,
    YoutubeLoader,
)
from langchain_core.documents import Document
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

known_source_ext = [
    "go",
    "py",
    "java",
    "sh",
    "bat",
    "ps1",
    "cmd",
    "js",
    "ts",
    "css",
    "cpp",
    "hpp",
    "h",
    "c",
    "cs",
    "sql",
    "log",
    "ini",
    "pl",
    "pm",
    "r",
    "dart",
    "dockerfile",
    "env",
    "php",
    "hs",
    "hsc",
    "lua",
    "nginxconf",
    "conf",
    "m",
    "mm",
    "plsql",
    "perl",
    "rb",
    "rs",
    "db2",
    "scala",
    "bash",
    "swift",
    "vue",
    "svelte",
    "msg",
    "ex",
    "exs",
    "erl",
    "tsx",
    "jsx",
    "hs",
    "lhs",
]


class TikaLoader:
    def __init__(self, url, file_path, mime_type=None):
        self.url = url
        self.file_path = file_path
        self.mime_type = mime_type

    def load(self) -> list[Document]:
        with open(self.file_path, "rb") as f:
            data = f.read()

        if self.mime_type is not None:
            headers = {"Content-Type": self.mime_type}
        else:
            headers = {}

        endpoint = self.url
        if not endpoint.endswith("/"):
            endpoint += "/"
        endpoint += "tika/text"

        r = requests.put(endpoint, data=data, headers=headers)

        if r.ok:
            raw_metadata = r.json()
            text = raw_metadata.get("X-TIKA:content", "<No text content found>")

            if "Content-Type" in raw_metadata:
                headers["Content-Type"] = raw_metadata["Content-Type"]

            log.info("Tika extracted text: %s", text)

            return [Document(page_content=text, metadata=headers)]
        else:
            raise Exception(f"Error calling Tika: {r.reason}")


class Loader:
    def __init__(self, engine: str = "", **kwargs):
        self.engine = engine
        self.kwargs = kwargs

    def load(
        self, filename: str, file_content_type: str, file_path: str
    ) -> list[Document]:
        loader = self._get_loader(filename, file_content_type, file_path)
        docs = loader.load()

        return [
            Document(
                page_content=ftfy.fix_text(doc.page_content), metadata=doc.metadata
            )
            for doc in docs
        ]

    def _get_loader(self, filename: str, file_content_type: str, file_path: str):
        file_ext = filename.split(".")[-1].lower()

        if self.engine == "tika" and self.kwargs.get("TIKA_SERVER_URL"):
            if file_ext in known_source_ext or (
                file_content_type and file_content_type.find("text/") >= 0
            ):
                loader = TextLoader(file_path, autodetect_encoding=True)
            else:
                loader = TikaLoader(
                    url=self.kwargs.get("TIKA_SERVER_URL"),
                    file_path=file_path,
                    mime_type=file_content_type,
                )
        else:
            if file_ext == "pdf":
                loader = PyPDFLoader(
                    file_path, extract_images=self.kwargs.get("PDF_EXTRACT_IMAGES")
                )
            elif file_ext == "csv":
                loader = CSVLoader(file_path)
            elif file_ext == "rst":
                loader = UnstructuredRSTLoader(file_path, mode="elements")
            elif file_ext == "xml":
                loader = UnstructuredXMLLoader(file_path)
            elif file_ext in ["htm", "html"]:
                loader = BSHTMLLoader(file_path, open_encoding="unicode_escape")
            elif file_ext == "md":
                loader = UnstructuredMarkdownLoader(file_path)
            elif file_content_type == "application/epub+zip":
                loader = UnstructuredEPubLoader(file_path)
            elif (
                file_content_type
                == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
                or file_ext == "docx"
            ):
                loader = Docx2txtLoader(file_path)
            elif file_content_type in [
                "application/vnd.ms-excel",
                "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            ] or file_ext in ["xls", "xlsx"]:
                loader = UnstructuredExcelLoader(file_path)
            elif file_content_type in [
                "application/vnd.ms-powerpoint",
                "application/vnd.openxmlformats-officedocument.presentationml.presentation",
            ] or file_ext in ["ppt", "pptx"]:
                loader = UnstructuredPowerPointLoader(file_path)
            elif file_ext == "msg":
                loader = OutlookMessageLoader(file_path)
            elif file_ext in known_source_ext or (
                file_content_type and file_content_type.find("text/") >= 0
            ):
                loader = TextLoader(file_path, autodetect_encoding=True)
            else:
                loader = TextLoader(file_path, autodetect_encoding=True)

        return loader
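
Reviewer note: a sketch of driving the Loader facade above; the filename, path, and kwargs mirror the keys _get_loader reads, and the concrete values are illustrative.

# Default (non-Tika) engine; routes a .docx through Docx2txtLoader.
loader = Loader(engine="", PDF_EXTRACT_IMAGES=False)
docs = loader.load(
    filename="notes.docx",  # placeholder
    file_content_type="application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    file_path="/tmp/notes.docx",  # placeholder
)
# Each Document's page_content is passed through ftfy.fix_text before being returned.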
File diff suppressed because it is too large.

backend/open_webui/apps/retrieval/models/colbert.py (new file)
@@ -0,0 +1,81 @@
import os
import torch
import numpy as np
from colbert.infra import ColBERTConfig
from colbert.modeling.checkpoint import Checkpoint


class ColBERT:
    def __init__(self, name, **kwargs) -> None:
        print("ColBERT: Loading model", name)
        self.device = "cuda" if torch.cuda.is_available() else "cpu"

        DOCKER = kwargs.get("env") == "docker"
        if DOCKER:
            # This is a workaround for the issue with the docker container
            # where the torch extension is not loaded properly
            # and the following error is thrown:
            # /root/.cache/torch_extensions/py311_cpu/segmented_maxsim_cpp/segmented_maxsim_cpp.so: cannot open shared object file: No such file or directory

            lock_file = (
                "/root/.cache/torch_extensions/py311_cpu/segmented_maxsim_cpp/lock"
            )
            if os.path.exists(lock_file):
                os.remove(lock_file)

        self.ckpt = Checkpoint(
            name,
            colbert_config=ColBERTConfig(model_name=name),
        ).to(self.device)
        pass

    def calculate_similarity_scores(self, query_embeddings, document_embeddings):

        query_embeddings = query_embeddings.to(self.device)
        document_embeddings = document_embeddings.to(self.device)

        # Validate dimensions to ensure compatibility
        if query_embeddings.dim() != 3:
            raise ValueError(
                f"Expected query embeddings to have 3 dimensions, but got {query_embeddings.dim()}."
            )
        if document_embeddings.dim() != 3:
            raise ValueError(
                f"Expected document embeddings to have 3 dimensions, but got {document_embeddings.dim()}."
            )
        if query_embeddings.size(0) not in [1, document_embeddings.size(0)]:
            raise ValueError(
                "There should be either one query or queries equal to the number of documents."
            )

        # Transpose the query embeddings to align for matrix multiplication
        transposed_query_embeddings = query_embeddings.permute(0, 2, 1)
        # Compute similarity scores using batch matrix multiplication
        computed_scores = torch.matmul(document_embeddings, transposed_query_embeddings)
        # Apply max pooling to extract the highest semantic similarity across each document's sequence
        maximum_scores = torch.max(computed_scores, dim=1).values

        # Sum up the maximum scores across features to get the overall document relevance scores
        final_scores = maximum_scores.sum(dim=1)

        normalized_scores = torch.softmax(final_scores, dim=0)

        return normalized_scores.detach().cpu().numpy().astype(np.float32)

    def predict(self, sentences):

        query = sentences[0][0]
        docs = [i[1] for i in sentences]

        # Embedding the documents
        embedded_docs = self.ckpt.docFromText(docs, bsize=32)[0]
        # Embedding the queries
        embedded_queries = self.ckpt.queryFromText([query], bsize=32)
        embedded_query = embedded_queries[0]

        # Calculate retrieval scores for the query against all documents
        scores = self.calculate_similarity_scores(
            embedded_query.unsqueeze(0), embedded_docs
        )

        return scores
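
Reviewer note: predict() treats the first pair's left element as the single query for every pair, so a rerank call looks like this sketch; the checkpoint name and texts are placeholders.

reranker = ColBERT("colbert-ir/colbertv2.0")  # placeholder checkpoint name
scores = reranker.predict(
    [
        ("what is open webui?", "Open WebUI is a self-hosted AI interface."),
        ("what is open webui?", "ColBERT is a late-interaction retriever."),
    ]
)
# A softmax-normalized float32 array, one relevance score per document.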
@@ -1,50 +1,85 @@
import os
import logging
import os
import uuid
from typing import Optional, Union

import requests

from typing import Union

from apps.ollama.main import (
    generate_ollama_embeddings,
    GenerateEmbeddingsForm,
)

from huggingface_hub import snapshot_download

from langchain_core.documents import Document
from langchain.retrievers import ContextualCompressionRetriever, EnsembleRetriever
from langchain_community.retrievers import BM25Retriever
from langchain.retrievers import (
    ContextualCompressionRetriever,
    EnsembleRetriever,
from langchain_core.documents import Document


from open_webui.apps.ollama.main import (
    GenerateEmbedForm,
    generate_ollama_batch_embeddings,
)
from open_webui.apps.retrieval.vector.connector import VECTOR_DB_CLIENT
from open_webui.utils.misc import get_last_user_message

from typing import Optional
from open_webui.env import SRC_LOG_LEVELS
from open_webui.config import DEFAULT_RAG_TEMPLATE

from utils.misc import get_last_user_message, add_or_update_system_message
from config import SRC_LOG_LEVELS, CHROMA_CLIENT

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


from typing import Any

from langchain_core.callbacks import CallbackManagerForRetrieverRun
from langchain_core.retrievers import BaseRetriever


class VectorSearchRetriever(BaseRetriever):
    collection_name: Any
    embedding_function: Any
    top_k: int

    def _get_relevant_documents(
        self,
        query: str,
        *,
        run_manager: CallbackManagerForRetrieverRun,
    ) -> list[Document]:
        result = VECTOR_DB_CLIENT.search(
            collection_name=self.collection_name,
            vectors=[self.embedding_function(query)],
            limit=self.top_k,
        )

        ids = result.ids[0]
        metadatas = result.metadatas[0]
        documents = result.documents[0]

        results = []
        for idx in range(len(ids)):
            results.append(
                Document(
                    metadata=metadatas[idx],
                    page_content=documents[idx],
                )
            )
        return results
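
Reviewer note: a sketch of the retriever in isolation; the collection name and embedding function are placeholders, and get_relevant_documents is assumed to be the LangChain BaseRetriever entry point that dispatches to _get_relevant_documents.

retriever = VectorSearchRetriever(
    collection_name="file-abc123",             # placeholder collection
    embedding_function=lambda q: [0.0] * 384,  # stand-in; any str -> list[float]
    top_k=5,
)
docs = retriever.get_relevant_documents("how do I configure comfyui?")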
|
||||
|
||||
|
||||
def query_doc(
|
||||
collection_name: str,
|
||||
query: str,
|
||||
embedding_function,
|
||||
query_embedding: list[float],
|
||||
k: int,
|
||||
):
|
||||
try:
|
||||
collection = CHROMA_CLIENT.get_collection(name=collection_name)
|
||||
query_embeddings = embedding_function(query)
|
||||
|
||||
result = collection.query(
|
||||
query_embeddings=[query_embeddings],
|
||||
n_results=k,
|
||||
result = VECTOR_DB_CLIENT.search(
|
||||
collection_name=collection_name,
|
||||
vectors=[query_embedding],
|
||||
limit=k,
|
||||
)
|
||||
|
||||
log.info(f"query_doc:result {result}")
|
||||
return result
|
||||
except Exception as e:
|
||||
print(e)
|
||||
raise e
|
||||
|
||||
|
||||
@@ -55,27 +90,25 @@ def query_doc_with_hybrid_search(
    k: int,
    reranking_function,
    r: float,
):
) -> dict:
    try:
        collection = CHROMA_CLIENT.get_collection(name=collection_name)
        documents = collection.get()  # get all documents
        result = VECTOR_DB_CLIENT.get(collection_name=collection_name)

        bm25_retriever = BM25Retriever.from_texts(
            texts=documents.get("documents"),
            metadatas=documents.get("metadatas"),
            texts=result.documents[0],
            metadatas=result.metadatas[0],
        )
        bm25_retriever.k = k

        chroma_retriever = ChromaRetriever(
            collection=collection,
        vector_search_retriever = VectorSearchRetriever(
            collection_name=collection_name,
            embedding_function=embedding_function,
            top_n=k,
            top_k=k,
        )

        ensemble_retriever = EnsembleRetriever(
            retrievers=[bm25_retriever, chroma_retriever], weights=[0.5, 0.5]
            retrievers=[bm25_retriever, vector_search_retriever], weights=[0.5, 0.5]
        )

        compressor = RerankCompressor(
            embedding_function=embedding_function,
            top_n=k,
@@ -100,7 +133,9 @@ def query_doc_with_hybrid_search(
        raise e


def merge_and_sort_query_results(query_results, k, reverse=False):
def merge_and_sort_query_results(
    query_results: list[dict], k: int, reverse: bool = False
) -> list[dict]:
    # Initialize lists to store combined data
    combined_distances = []
    combined_documents = []
@@ -146,19 +181,26 @@ def query_collection(
    query: str,
    embedding_function,
    k: int,
):
) -> dict:

    results = []
    query_embedding = embedding_function(query)

    for collection_name in collection_names:
        try:
            result = query_doc(
                collection_name=collection_name,
                query=query,
                k=k,
                embedding_function=embedding_function,
            )
            results.append(result)
        except Exception:
        if collection_name:
            try:
                result = query_doc(
                    collection_name=collection_name,
                    k=k,
                    query_embedding=query_embedding,
                )
                if result is not None:
                    results.append(result.model_dump())
            except Exception as e:
                log.exception(f"Error when querying the collection: {e}")
        else:
            pass

    return merge_and_sort_query_results(results, k=k)


@@ -169,8 +211,9 @@ def query_collection_with_hybrid_search(
    k: int,
    reranking_function,
    r: float,
):
) -> dict:
    results = []
    error = False
    for collection_name in collection_names:
        try:
            result = query_doc_with_hybrid_search(
@@ -182,14 +225,55 @@ def query_collection_with_hybrid_search(
                r=r,
            )
            results.append(result)
        except Exception:
            pass
        except Exception as e:
            log.exception(
                "Error when querying the collection with " f"hybrid_search: {e}"
            )
            error = True

    if error:
        raise Exception(
            "Hybrid search failed for all collections. Using Non hybrid search as fallback."
        )

    return merge_and_sort_query_results(results, k=k, reverse=True)


def rag_template(template: str, context: str, query: str):
    if template == "":
        template = DEFAULT_RAG_TEMPLATE

    if "[context]" not in template and "{{CONTEXT}}" not in template:
        log.debug(
            "WARNING: The RAG template does not contain the '[context]' or '{{CONTEXT}}' placeholder."
        )

    if "<context>" in context and "</context>" in context:
        log.debug(
            "WARNING: Potential prompt injection attack: the RAG "
            "context contains '<context>' and '</context>'. This might be "
            "nothing, or the user might be trying to hack something."
        )

    query_placeholders = []
    if "[query]" in context:
        query_placeholder = "{{QUERY" + str(uuid.uuid4()) + "}}"
        template = template.replace("[query]", query_placeholder)
        query_placeholders.append(query_placeholder)

    if "{{QUERY}}" in context:
        query_placeholder = "{{QUERY" + str(uuid.uuid4()) + "}}"
        template = template.replace("{{QUERY}}", query_placeholder)
        query_placeholders.append(query_placeholder)

    template = template.replace("[context]", context)
    template = template.replace("{{CONTEXT}}", context)
    template = template.replace("[query]", query)
    template = template.replace("{{QUERY}}", query)

    for query_placeholder in query_placeholders:
        template = template.replace(query_placeholder, query)

    return template
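A small worked example of the template filling above (template text is illustrative): an empty template falls back to DEFAULT_RAG_TEMPLATE, and the UUID renaming keeps the template's own query placeholder distinct while the context, which may itself contain "[query]" or "{{QUERY}}", is substituted first.

# Minimal sketch:
template = "Use the context to answer.\n[context]\nQuestion: [query]"
print(rag_template(template, "retrieved passage", "what is X?"))
# Use the context to answer.
# retrieved passage
# Question: what is X?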
@@ -199,39 +283,27 @@ def get_embedding_function(
    embedding_function,
    openai_key,
    openai_url,
    batch_size,
    embedding_batch_size,
):
    if embedding_engine == "":
        return lambda query: embedding_function.encode(query).tolist()
    elif embedding_engine in ["ollama", "openai"]:
        if embedding_engine == "ollama":
            func = lambda query: generate_ollama_embeddings(
                GenerateEmbeddingsForm(
                    **{
                        "model": embedding_model,
                        "prompt": query,
                    }
                )
            )
        elif embedding_engine == "openai":
            func = lambda query: generate_openai_embeddings(
                model=embedding_model,
                text=query,
                key=openai_key,
                url=openai_url,
            )
        func = lambda query: generate_embeddings(
            engine=embedding_engine,
            model=embedding_model,
            text=query,
            key=openai_key if embedding_engine == "openai" else "",
            url=openai_url if embedding_engine == "openai" else "",
        )

        def generate_multiple(query, f):
        def generate_multiple(query, func):
            if isinstance(query, list):
                if embedding_engine == "openai":
                    embeddings = []
                    for i in range(0, len(query), batch_size):
                        embeddings.extend(f(query[i : i + batch_size]))
                    return embeddings
                else:
                    return [f(q) for q in query]
                embeddings = []
                for i in range(0, len(query), embedding_batch_size):
                    embeddings.extend(func(query[i : i + embedding_batch_size]))
                return embeddings
            else:
                return f(query)
                return func(query)

        return lambda query: generate_multiple(query, func)
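A hedged sketch of what the unified batching above does (the function wiring and batch size are illustrative assumptions, not part of the diff): list inputs are now chunked for every engine, not just OpenAI.

# Minimal sketch, assuming func embeds a list of strings per call:
embedding_batch_size = 2

def generate_multiple(query, func):
    if isinstance(query, list):
        embeddings = []
        for i in range(0, len(query), embedding_batch_size):
            embeddings.extend(func(query[i : i + embedding_batch_size]))
        return embeddings
    return func(query)

print(generate_multiple(["a", "b", "c"], lambda xs: [[len(x)] for x in xs]))
# -> [[1], [1], [1]], produced by two batched calls: ["a", "b"], then ["c"]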
@@ -252,71 +324,107 @@ def get_rag_context(
    relevant_contexts = []

    for file in files:
        context = None

        collection_names = (
            file["collection_names"]
            if file["type"] == "collection"
            else [file["collection_name"]]
        )

        collection_names = set(collection_names).difference(extracted_collections)
        if not collection_names:
            log.debug(f"skipping {file} as it has already been extracted")
            continue

        try:
            if file["type"] == "text":
                context = file["content"]
            else:
                if hybrid_search:
                    context = query_collection_with_hybrid_search(
                        collection_names=collection_names,
                        query=query,
                        embedding_function=embedding_function,
                        k=k,
                        reranking_function=reranking_function,
                        r=r,
                    )
                else:
                    context = query_collection(
                        collection_names=collection_names,
                        query=query,
                        embedding_function=embedding_function,
                        k=k,
                    )
        except Exception as e:
            log.exception(e)
        if file.get("context") == "full":
            context = {
                "documents": [[file.get("file").get("data", {}).get("content")]],
                "metadatas": [[{"file_id": file.get("id"), "name": file.get("name")}]],
            }
        else:
            context = None

        if context:
            relevant_contexts.append({**context, "source": file})
        collection_names = []
        if file.get("type") == "collection":
            if file.get("legacy"):
                collection_names = file.get("collection_names", [])
            else:
                collection_names.append(file["id"])
        elif file.get("collection_name"):
            collection_names.append(file["collection_name"])
        elif file.get("id"):
            if file.get("legacy"):
                collection_names.append(f"{file['id']}")
            else:
                collection_names.append(f"file-{file['id']}")

        extracted_collections.extend(collection_names)
        collection_names = set(collection_names).difference(extracted_collections)
        if not collection_names:
            log.debug(f"skipping {file} as it has already been extracted")
            continue

        try:
            context = None
            if file.get("type") == "text":
                context = file["content"]
            else:
                if hybrid_search:
                    try:
                        context = query_collection_with_hybrid_search(
                            collection_names=collection_names,
                            query=query,
                            embedding_function=embedding_function,
                            k=k,
                            reranking_function=reranking_function,
                            r=r,
                        )
                    except Exception as e:
                        log.debug(
                            "Error when using hybrid search, using"
                            " non hybrid search as fallback."
                        )

                if (not hybrid_search) or (context is None):
                    context = query_collection(
                        collection_names=collection_names,
                        query=query,
                        embedding_function=embedding_function,
                        k=k,
                    )
        except Exception as e:
            log.exception(e)

        extracted_collections.extend(collection_names)

        if context:
            if "data" in file:
                del file["data"]
            relevant_contexts.append({**context, "file": file})

    contexts = []
    citations = []

    for context in relevant_contexts:
        try:
            if "documents" in context:
                file_names = list(
                    set(
                        [
                            metadata["name"]
                            for metadata in context["metadatas"][0]
                            if metadata is not None and "name" in metadata
                        ]
                    )
                )
                contexts.append(
                    "\n\n".join(
                        ((", ".join(file_names) + ":\n\n") if file_names else "")
                        + "\n\n".join(
                            [text for text in context["documents"][0] if text is not None]
                        )
                    )

            if "metadatas" in context:
                citations.append(
                    {
                        "source": context["source"],
                        "document": context["documents"][0],
                        "metadata": context["metadatas"][0],
                    }
                )
                citation = {
                    "source": context["file"],
                    "document": context["documents"][0],
                    "metadata": context["metadatas"][0],
                }
                if "distances" in context and context["distances"]:
                    citation["distances"] = context["distances"][0]
                citations.append(citation)
        except Exception as e:
            log.exception(e)

    print("contexts", contexts)
    print("citations", citations)

    return contexts, citations


@@ -358,20 +466,6 @@ def get_model_path(model: str, update_model: bool = False):
    return model


def generate_openai_embeddings(
    model: str,
    text: Union[str, list[str]],
    key: str,
    url: str = "https://api.openai.com/v1",
):
    if isinstance(text, list):
        embeddings = generate_openai_batch_embeddings(model, text, key, url)
    else:
        embeddings = generate_openai_batch_embeddings(model, [text], key, url)

    return embeddings[0] if isinstance(text, str) else embeddings


def generate_openai_batch_embeddings(
    model: str, texts: list[str], key: str, url: str = "https://api.openai.com/v1"
) -> Optional[list[list[float]]]:
@@ -395,52 +489,38 @@ def generate_openai_batch_embeddings(
    return None


from typing import Any

from langchain_core.retrievers import BaseRetriever
from langchain_core.callbacks import CallbackManagerForRetrieverRun


class ChromaRetriever(BaseRetriever):
    collection: Any
    embedding_function: Any
    top_n: int

    def _get_relevant_documents(
        self,
        query: str,
        *,
        run_manager: CallbackManagerForRetrieverRun,
    ) -> list[Document]:
        query_embeddings = self.embedding_function(query)

        results = self.collection.query(
            query_embeddings=[query_embeddings],
            n_results=self.top_n,
        )

        ids = results["ids"][0]
        metadatas = results["metadatas"][0]
        documents = results["documents"][0]

        results = []
        for idx in range(len(ids)):
            results.append(
                Document(
                    metadata=metadatas[idx],
                    page_content=documents[idx],
                )
def generate_embeddings(engine: str, model: str, text: Union[str, list[str]], **kwargs):
    if engine == "ollama":
        if isinstance(text, list):
            embeddings = generate_ollama_batch_embeddings(
                GenerateEmbedForm(**{"model": model, "input": text})
            )
        return results
        else:
            embeddings = generate_ollama_batch_embeddings(
                GenerateEmbedForm(**{"model": model, "input": [text]})
            )
        return (
            embeddings["embeddings"][0]
            if isinstance(text, str)
            else embeddings["embeddings"]
        )
    elif engine == "openai":
        key = kwargs.get("key", "")
        url = kwargs.get("url", "https://api.openai.com/v1")

        if isinstance(text, list):
            embeddings = generate_openai_batch_embeddings(model, text, key, url)
        else:
            embeddings = generate_openai_batch_embeddings(model, [text], key, url)

        return embeddings[0] if isinstance(text, str) else embeddings
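Usage sketch for the unified generate_embeddings dispatcher above (model names and the key are placeholders): the engine string selects the backend, and a str vs list[str] input decides whether one vector or a batch comes back.

# Minimal sketch:
single = generate_embeddings(engine="ollama", model="nomic-embed-text", text="hello")
batch = generate_embeddings(
    engine="openai",
    model="text-embedding-3-small",
    text=["a", "b"],
    key="YOUR_OPENAI_KEY",
    url="https://api.openai.com/v1",
)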
import operator

from typing import Optional, Sequence

from langchain_core.documents import BaseDocumentCompressor, Document
from langchain_core.callbacks import Callbacks
from langchain_core.pydantic_v1 import Extra
from langchain_core.documents import BaseDocumentCompressor, Document


class RerankCompressor(BaseDocumentCompressor):
@@ -450,7 +530,7 @@ class RerankCompressor(BaseDocumentCompressor):
    r_score: float

    class Config:
        extra = Extra.forbid
        extra = "forbid"
        arbitrary_types_allowed = True

    def compress_documents(

backend/open_webui/apps/retrieval/vector/connector.py (new file, +14)
@@ -0,0 +1,14 @@
from open_webui.config import VECTOR_DB

if VECTOR_DB == "milvus":
    from open_webui.apps.retrieval.vector.dbs.milvus import MilvusClient

    VECTOR_DB_CLIENT = MilvusClient()
elif VECTOR_DB == "qdrant":
    from open_webui.apps.retrieval.vector.dbs.qdrant import QdrantClient

    VECTOR_DB_CLIENT = QdrantClient()
else:
    from open_webui.apps.retrieval.vector.dbs.chroma import ChromaClient

    VECTOR_DB_CLIENT = ChromaClient()
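The connector gives all retrieval code one client object behind a shared interface (has_collection/search/query/get/insert/upsert/delete/reset), chosen once from VECTOR_DB. A minimal caller-side sketch (the collection name and vector are hypothetical):

from open_webui.apps.retrieval.vector.connector import VECTOR_DB_CLIENT

if VECTOR_DB_CLIENT.has_collection("file-abc123"):
    result = VECTOR_DB_CLIENT.search(
        collection_name="file-abc123",
        vectors=[query_vector],  # query_vector: a precomputed embedding
        limit=5,
    )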
backend/open_webui/apps/retrieval/vector/dbs/chroma.py (new file, +161)
@@ -0,0 +1,161 @@
import chromadb
from chromadb import Settings
from chromadb.utils.batch_utils import create_batches

from typing import Optional

from open_webui.apps.retrieval.vector.main import VectorItem, SearchResult, GetResult
from open_webui.config import (
    CHROMA_DATA_PATH,
    CHROMA_HTTP_HOST,
    CHROMA_HTTP_PORT,
    CHROMA_HTTP_HEADERS,
    CHROMA_HTTP_SSL,
    CHROMA_TENANT,
    CHROMA_DATABASE,
)


class ChromaClient:
    def __init__(self):
        if CHROMA_HTTP_HOST != "":
            self.client = chromadb.HttpClient(
                host=CHROMA_HTTP_HOST,
                port=CHROMA_HTTP_PORT,
                headers=CHROMA_HTTP_HEADERS,
                ssl=CHROMA_HTTP_SSL,
                tenant=CHROMA_TENANT,
                database=CHROMA_DATABASE,
                settings=Settings(allow_reset=True, anonymized_telemetry=False),
            )
        else:
            self.client = chromadb.PersistentClient(
                path=CHROMA_DATA_PATH,
                settings=Settings(allow_reset=True, anonymized_telemetry=False),
                tenant=CHROMA_TENANT,
                database=CHROMA_DATABASE,
            )

    def has_collection(self, collection_name: str) -> bool:
        # Check if the collection exists based on the collection name.
        collections = self.client.list_collections()
        return collection_name in [collection.name for collection in collections]

    def delete_collection(self, collection_name: str):
        # Delete the collection based on the collection name.
        return self.client.delete_collection(name=collection_name)

    def search(
        self, collection_name: str, vectors: list[list[float | int]], limit: int
    ) -> Optional[SearchResult]:
        # Search for the nearest neighbor items based on the vectors and return 'limit' number of results.
        try:
            collection = self.client.get_collection(name=collection_name)
            if collection:
                result = collection.query(
                    query_embeddings=vectors,
                    n_results=limit,
                )

                return SearchResult(
                    **{
                        "ids": result["ids"],
                        "distances": result["distances"],
                        "documents": result["documents"],
                        "metadatas": result["metadatas"],
                    }
                )
            return None
        except Exception as e:
            return None

    def query(
        self, collection_name: str, filter: dict, limit: Optional[int] = None
    ) -> Optional[GetResult]:
        # Query the items from the collection based on the filter.
        try:
            collection = self.client.get_collection(name=collection_name)
            if collection:
                result = collection.get(
                    where=filter,
                    limit=limit,
                )

                return GetResult(
                    **{
                        "ids": [result["ids"]],
                        "documents": [result["documents"]],
                        "metadatas": [result["metadatas"]],
                    }
                )
            return None
        except Exception as e:
            print(e)
            return None

    def get(self, collection_name: str) -> Optional[GetResult]:
        # Get all the items in the collection.
        collection = self.client.get_collection(name=collection_name)
        if collection:
            result = collection.get()
            return GetResult(
                **{
                    "ids": [result["ids"]],
                    "documents": [result["documents"]],
                    "metadatas": [result["metadatas"]],
                }
            )
        return None

    def insert(self, collection_name: str, items: list[VectorItem]):
        # Insert the items into the collection, if the collection does not exist, it will be created.
        collection = self.client.get_or_create_collection(
            name=collection_name, metadata={"hnsw:space": "cosine"}
        )

        ids = [item["id"] for item in items]
        documents = [item["text"] for item in items]
        embeddings = [item["vector"] for item in items]
        metadatas = [item["metadata"] for item in items]

        for batch in create_batches(
            api=self.client,
            documents=documents,
            embeddings=embeddings,
            ids=ids,
            metadatas=metadatas,
        ):
            collection.add(*batch)

    def upsert(self, collection_name: str, items: list[VectorItem]):
        # Update the items in the collection, if the items are not present, insert them. If the collection does not exist, it will be created.
        collection = self.client.get_or_create_collection(
            name=collection_name, metadata={"hnsw:space": "cosine"}
        )

        ids = [item["id"] for item in items]
        documents = [item["text"] for item in items]
        embeddings = [item["vector"] for item in items]
        metadatas = [item["metadata"] for item in items]

        collection.upsert(
            ids=ids, documents=documents, embeddings=embeddings, metadatas=metadatas
        )

    def delete(
        self,
        collection_name: str,
        ids: Optional[list[str]] = None,
        filter: Optional[dict] = None,
    ):
        # Delete the items from the collection based on the ids.
        collection = self.client.get_collection(name=collection_name)
        if collection:
            if ids:
                collection.delete(ids=ids)
            elif filter:
                collection.delete(where=filter)

    def reset(self):
        # Resets the database. This will delete all collections and item entries.
        return self.client.reset()
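Both insert and upsert above consume the shared VectorItem shape from vector/main.py. A minimal sketch with illustrative values:

items = [
    {
        "id": "doc-1",
        "text": "chunk text",
        "vector": [0.1, 0.2, 0.3],
        "metadata": {"name": "file.pdf"},
    }
]
VECTOR_DB_CLIENT.insert(collection_name="file-abc123", items=items)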
backend/open_webui/apps/retrieval/vector/dbs/milvus.py (new file, +286)
@@ -0,0 +1,286 @@
from pymilvus import MilvusClient as Client
from pymilvus import FieldSchema, DataType
import json

from typing import Optional

from open_webui.apps.retrieval.vector.main import VectorItem, SearchResult, GetResult
from open_webui.config import (
    MILVUS_URI,
)


class MilvusClient:
    def __init__(self):
        self.collection_prefix = "open_webui"
        self.client = Client(uri=MILVUS_URI)

    def _result_to_get_result(self, result) -> GetResult:
        ids = []
        documents = []
        metadatas = []

        for match in result:
            _ids = []
            _documents = []
            _metadatas = []
            for item in match:
                _ids.append(item.get("id"))
                _documents.append(item.get("data", {}).get("text"))
                _metadatas.append(item.get("metadata"))

            ids.append(_ids)
            documents.append(_documents)
            metadatas.append(_metadatas)

        return GetResult(
            **{
                "ids": ids,
                "documents": documents,
                "metadatas": metadatas,
            }
        )

    def _result_to_search_result(self, result) -> SearchResult:
        ids = []
        distances = []
        documents = []
        metadatas = []

        for match in result:
            _ids = []
            _distances = []
            _documents = []
            _metadatas = []

            for item in match:
                _ids.append(item.get("id"))
                _distances.append(item.get("distance"))
                _documents.append(item.get("entity", {}).get("data", {}).get("text"))
                _metadatas.append(item.get("entity", {}).get("metadata"))

            ids.append(_ids)
            distances.append(_distances)
            documents.append(_documents)
            metadatas.append(_metadatas)

        return SearchResult(
            **{
                "ids": ids,
                "distances": distances,
                "documents": documents,
                "metadatas": metadatas,
            }
        )

    def _create_collection(self, collection_name: str, dimension: int):
        schema = self.client.create_schema(
            auto_id=False,
            enable_dynamic_field=True,
        )
        schema.add_field(
            field_name="id",
            datatype=DataType.VARCHAR,
            is_primary=True,
            max_length=65535,
        )
        schema.add_field(
            field_name="vector",
            datatype=DataType.FLOAT_VECTOR,
            dim=dimension,
            description="vector",
        )
        schema.add_field(field_name="data", datatype=DataType.JSON, description="data")
        schema.add_field(
            field_name="metadata", datatype=DataType.JSON, description="metadata"
        )

        index_params = self.client.prepare_index_params()
        index_params.add_index(
            field_name="vector",
            index_type="HNSW",
            metric_type="COSINE",
            params={"M": 16, "efConstruction": 100},
        )

        self.client.create_collection(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            schema=schema,
            index_params=index_params,
        )

    def has_collection(self, collection_name: str) -> bool:
        # Check if the collection exists based on the collection name.
        collection_name = collection_name.replace("-", "_")
        return self.client.has_collection(
            collection_name=f"{self.collection_prefix}_{collection_name}"
        )

    def delete_collection(self, collection_name: str):
        # Delete the collection based on the collection name.
        collection_name = collection_name.replace("-", "_")
        return self.client.drop_collection(
            collection_name=f"{self.collection_prefix}_{collection_name}"
        )

    def search(
        self, collection_name: str, vectors: list[list[float | int]], limit: int
    ) -> Optional[SearchResult]:
        # Search for the nearest neighbor items based on the vectors and return 'limit' number of results.
        collection_name = collection_name.replace("-", "_")
        result = self.client.search(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            data=vectors,
            limit=limit,
            output_fields=["data", "metadata"],
        )

        return self._result_to_search_result(result)

    def query(self, collection_name: str, filter: dict, limit: Optional[int] = None):
        # Construct the filter string for querying
        collection_name = collection_name.replace("-", "_")
        if not self.has_collection(collection_name):
            return None

        filter_string = " && ".join(
            [
                f'metadata["{key}"] == {json.dumps(value)}'
                for key, value in filter.items()
            ]
        )

        max_limit = 16383  # The maximum number of records per request
        all_results = []

        if limit is None:
            limit = float("inf")  # Use infinity as a placeholder for no limit

        # Initialize offset and remaining to handle pagination
        offset = 0
        remaining = limit

        try:
            # Loop until there are no more items to fetch or the desired limit is reached
            while remaining > 0:
                print("remaining", remaining)
                current_fetch = min(
                    max_limit, remaining
                )  # Determine how many items to fetch in this iteration

                results = self.client.query(
                    collection_name=f"{self.collection_prefix}_{collection_name}",
                    filter=filter_string,
                    output_fields=["*"],
                    limit=current_fetch,
                    offset=offset,
                )

                if not results:
                    break

                all_results.extend(results)
                results_count = len(results)
                remaining -= (
                    results_count  # Decrease remaining by the number of items fetched
                )
                offset += results_count

                # Break the loop if the results returned are less than the requested fetch count
                if results_count < current_fetch:
                    break

            print(all_results)
            return self._result_to_get_result([all_results])
        except Exception as e:
            print(e)
            return None

    def get(self, collection_name: str) -> Optional[GetResult]:
        # Get all the items in the collection.
        collection_name = collection_name.replace("-", "_")
        result = self.client.query(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            filter='id != ""',
        )
        return self._result_to_get_result([result])

    def insert(self, collection_name: str, items: list[VectorItem]):
        # Insert the items into the collection, if the collection does not exist, it will be created.
        collection_name = collection_name.replace("-", "_")
        if not self.client.has_collection(
            collection_name=f"{self.collection_prefix}_{collection_name}"
        ):
            self._create_collection(
                collection_name=collection_name, dimension=len(items[0]["vector"])
            )

        return self.client.insert(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            data=[
                {
                    "id": item["id"],
                    "vector": item["vector"],
                    "data": {"text": item["text"]},
                    "metadata": item["metadata"],
                }
                for item in items
            ],
        )

    def upsert(self, collection_name: str, items: list[VectorItem]):
        # Update the items in the collection, if the items are not present, insert them. If the collection does not exist, it will be created.
        collection_name = collection_name.replace("-", "_")
        if not self.client.has_collection(
            collection_name=f"{self.collection_prefix}_{collection_name}"
        ):
            self._create_collection(
                collection_name=collection_name, dimension=len(items[0]["vector"])
            )

        return self.client.upsert(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            data=[
                {
                    "id": item["id"],
                    "vector": item["vector"],
                    "data": {"text": item["text"]},
                    "metadata": item["metadata"],
                }
                for item in items
            ],
        )

    def delete(
        self,
        collection_name: str,
        ids: Optional[list[str]] = None,
        filter: Optional[dict] = None,
    ):
        # Delete the items from the collection based on the ids.
        collection_name = collection_name.replace("-", "_")
        if ids:
            return self.client.delete(
                collection_name=f"{self.collection_prefix}_{collection_name}",
                ids=ids,
            )
        elif filter:
            # Convert the filter dictionary to a string using JSON_CONTAINS.
            filter_string = " && ".join(
                [
                    f'metadata["{key}"] == {json.dumps(value)}'
                    for key, value in filter.items()
                ]
            )

            return self.client.delete(
                collection_name=f"{self.collection_prefix}_{collection_name}",
                filter=filter_string,
            )

    def reset(self):
        # Resets the database. This will delete all collections and item entries.
        collection_names = self.client.list_collections()
        for collection_name in collection_names:
            if collection_name.startswith(self.collection_prefix):
                self.client.drop_collection(collection_name=collection_name)
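Note the name mangling used throughout the Milvus client above: every public method rewrites "-" to "_" and prepends the collection prefix, so callers keep using their original names. Illustrative only:

collection_name = "file-abc123".replace("-", "_")  # "file_abc123"
milvus_name = f"open_webui_{collection_name}"      # "open_webui_file_abc123"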
backend/open_webui/apps/retrieval/vector/dbs/qdrant.py (new file, +179)
@@ -0,0 +1,179 @@
from typing import Optional

from qdrant_client import QdrantClient as Qclient
from qdrant_client.http.models import PointStruct
from qdrant_client.models import models

from open_webui.apps.retrieval.vector.main import VectorItem, SearchResult, GetResult
from open_webui.config import QDRANT_URI

NO_LIMIT = 999999999


class QdrantClient:
    def __init__(self):
        self.collection_prefix = "open-webui"
        self.QDRANT_URI = QDRANT_URI
        self.client = Qclient(url=self.QDRANT_URI) if self.QDRANT_URI else None

    def _result_to_get_result(self, points) -> GetResult:
        ids = []
        documents = []
        metadatas = []

        for point in points:
            payload = point.payload
            ids.append(point.id)
            documents.append(payload["text"])
            metadatas.append(payload["metadata"])

        return GetResult(
            **{
                "ids": [ids],
                "documents": [documents],
                "metadatas": [metadatas],
            }
        )

    def _create_collection(self, collection_name: str, dimension: int):
        collection_name_with_prefix = f"{self.collection_prefix}_{collection_name}"
        self.client.create_collection(
            collection_name=collection_name_with_prefix,
            vectors_config=models.VectorParams(
                size=dimension, distance=models.Distance.COSINE
            ),
        )

        print(f"collection {collection_name_with_prefix} successfully created!")

    def _create_collection_if_not_exists(self, collection_name, dimension):
        if not self.has_collection(collection_name=collection_name):
            self._create_collection(
                collection_name=collection_name, dimension=dimension
            )

    def _create_points(self, items: list[VectorItem]):
        return [
            PointStruct(
                id=item["id"],
                vector=item["vector"],
                payload={"text": item["text"], "metadata": item["metadata"]},
            )
            for item in items
        ]

    def has_collection(self, collection_name: str) -> bool:
        return self.client.collection_exists(
            f"{self.collection_prefix}_{collection_name}"
        )

    def delete_collection(self, collection_name: str):
        return self.client.delete_collection(
            collection_name=f"{self.collection_prefix}_{collection_name}"
        )

    def search(
        self, collection_name: str, vectors: list[list[float | int]], limit: int
    ) -> Optional[SearchResult]:
        # Search for the nearest neighbor items based on the vectors and return 'limit' number of results.
        if limit is None:
            limit = NO_LIMIT  # otherwise qdrant would set limit to 10!

        query_response = self.client.query_points(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            query=vectors[0],
            limit=limit,
        )
        get_result = self._result_to_get_result(query_response.points)
        return SearchResult(
            ids=get_result.ids,
            documents=get_result.documents,
            metadatas=get_result.metadatas,
            distances=[[point.score for point in query_response.points]],
        )

    def query(self, collection_name: str, filter: dict, limit: Optional[int] = None):
        # Construct the filter string for querying
        if not self.has_collection(collection_name):
            return None
        try:
            if limit is None:
                limit = NO_LIMIT  # otherwise qdrant would set limit to 10!

            field_conditions = []
            for key, value in filter.items():
                field_conditions.append(
                    models.FieldCondition(
                        key=f"metadata.{key}", match=models.MatchValue(value=value)
                    )
                )

            points = self.client.query_points(
                collection_name=f"{self.collection_prefix}_{collection_name}",
                query_filter=models.Filter(should=field_conditions),
                limit=limit,
            )
            return self._result_to_get_result(points.points)
        except Exception as e:
            print(e)
            return None

    def get(self, collection_name: str) -> Optional[GetResult]:
        # Get all the items in the collection.
        points = self.client.query_points(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            limit=NO_LIMIT,  # otherwise qdrant would set limit to 10!
        )
        return self._result_to_get_result(points.points)

    def insert(self, collection_name: str, items: list[VectorItem]):
        # Insert the items into the collection, if the collection does not exist, it will be created.
        self._create_collection_if_not_exists(collection_name, len(items[0]["vector"]))
        points = self._create_points(items)
        self.client.upload_points(f"{self.collection_prefix}_{collection_name}", points)

    def upsert(self, collection_name: str, items: list[VectorItem]):
        # Update the items in the collection, if the items are not present, insert them. If the collection does not exist, it will be created.
        self._create_collection_if_not_exists(collection_name, len(items[0]["vector"]))
        points = self._create_points(items)
        return self.client.upsert(f"{self.collection_prefix}_{collection_name}", points)

    def delete(
        self,
        collection_name: str,
        ids: Optional[list[str]] = None,
        filter: Optional[dict] = None,
    ):
        # Delete the items from the collection based on the ids.
        field_conditions = []

        if ids:
            for id_value in ids:
                field_conditions.append(
                    models.FieldCondition(
                        key="metadata.id",
                        match=models.MatchValue(value=id_value),
                    ),
                ),
        elif filter:
            for key, value in filter.items():
                field_conditions.append(
                    models.FieldCondition(
                        key=f"metadata.{key}",
                        match=models.MatchValue(value=value),
                    ),
                ),

        return self.client.delete(
            collection_name=f"{self.collection_prefix}_{collection_name}",
            points_selector=models.FilterSelector(
                filter=models.Filter(must=field_conditions)
            ),
        )

    def reset(self):
        # Resets the database. This will delete all collections and item entries.
        collection_names = self.client.get_collections().collections
        for collection_name in collection_names:
            if collection_name.name.startswith(self.collection_prefix):
                self.client.delete_collection(collection_name=collection_name.name)
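For reference, delete() above expresses both id-based and metadata-based deletes as one Qdrant filter of must-conditions; a hedged standalone sketch of the selector it builds:

selector = models.FilterSelector(
    filter=models.Filter(
        must=[
            models.FieldCondition(
                key="metadata.id",
                match=models.MatchValue(value="doc-1"),  # illustrative id
            )
        ]
    )
)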
backend/open_webui/apps/retrieval/vector/main.py (new file, +19)
@@ -0,0 +1,19 @@
from pydantic import BaseModel
from typing import Optional, List, Any


class VectorItem(BaseModel):
    id: str
    text: str
    vector: List[float | int]
    metadata: Any


class GetResult(BaseModel):
    ids: Optional[List[List[str]]]
    documents: Optional[List[List[str]]]
    metadatas: Optional[List[List[Any]]]


class SearchResult(GetResult):
    distances: Optional[List[List[float | int]]]
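The list-of-lists shape mirrors Chroma's response format: the outer list indexes the query, the inner list the matches for that query. A minimal construction sketch (values illustrative):

result = SearchResult(
    ids=[["doc-1", "doc-2"]],
    documents=[["first chunk", "second chunk"]],
    metadatas=[[{"name": "a.pdf"}, {"name": "b.pdf"}]],
    distances=[[0.12, 0.34]],
)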
@@ -1,9 +1,9 @@
import logging
from typing import Optional
import requests

from apps.rag.search.main import SearchResult, get_filtered_results
from config import SRC_LOG_LEVELS
import requests
from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,8 +1,9 @@
import logging
from typing import Optional
from apps.rag.search.main import SearchResult, get_filtered_results

from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from duckduckgo_search import DDGS
from config import SRC_LOG_LEVELS
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,10 +1,9 @@
import json
import logging
from typing import Optional
import requests

from apps.rag.search.main import SearchResult, get_filtered_results
from config import SRC_LOG_LEVELS
import requests
from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,9 +1,9 @@
import logging
import requests
from yarl import URL

from apps.rag.search.main import SearchResult
from config import SRC_LOG_LEVELS
import requests
from open_webui.apps.retrieval.web.main import SearchResult
from open_webui.env import SRC_LOG_LEVELS
from yarl import URL

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,5 +1,6 @@
from typing import Optional
from urllib.parse import urlparse

from pydantic import BaseModel


@@ -8,7 +9,8 @@ def get_filtered_results(results, filter_list):
        return results
    filtered_results = []
    for result in results:
        domain = urlparse(result["url"]).netloc
        url = result.get("url") or result.get("link", "")
        domain = urlparse(url).netloc
        if any(domain.endswith(filtered_domain) for filtered_domain in filter_list):
            filtered_results.append(result)
    return filtered_results

backend/open_webui/apps/retrieval/web/searchapi.py (new file, +48)
@@ -0,0 +1,48 @@
import logging
from typing import Optional
from urllib.parse import urlencode

import requests
from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_searchapi(
    api_key: str,
    engine: str,
    query: str,
    count: int,
    filter_list: Optional[list[str]] = None,
) -> list[SearchResult]:
    """Search using searchapi.io's API and return the results as a list of SearchResult objects.

    Args:
        api_key (str): A searchapi.io API key
        query (str): The query to search for
    """
    url = "https://www.searchapi.io/api/v1/search"

    engine = engine or "google"

    payload = {"engine": engine, "q": query, "api_key": api_key}

    url = f"{url}?{urlencode(payload)}"
    response = requests.request("GET", url)

    json_response = response.json()
    log.info(f"results from searchapi search: {json_response}")

    results = sorted(
        json_response.get("organic_results", []), key=lambda x: x.get("position", 0)
    )
    if filter_list:
        results = get_filtered_results(results, filter_list)
    return [
        SearchResult(
            link=result["link"], title=result["title"], snippet=result["snippet"]
        )
        for result in results[:count]
    ]
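Hedged usage sketch for search_searchapi above (the API key is a placeholder; filter_list keeps only results whose domain ends with a listed entry):

results = search_searchapi(
    api_key="YOUR_SEARCHAPI_KEY",
    engine="google",
    query="open webui",
    count=3,
    filter_list=["wikipedia.org"],
)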
@@ -1,10 +1,9 @@
import logging
import requests

from typing import Optional

from apps.rag.search.main import SearchResult, get_filtered_results
from config import SRC_LOG_LEVELS
import requests
from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,10 +1,10 @@
import json
import logging
from typing import Optional
import requests

from apps.rag.search.main import SearchResult, get_filtered_results
from config import SRC_LOG_LEVELS
import requests
from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,11 +1,10 @@
import json
import logging
from typing import Optional
import requests
from urllib.parse import urlencode

from apps.rag.search.main import SearchResult, get_filtered_results
from config import SRC_LOG_LEVELS
import requests
from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,10 +1,9 @@
import json
import logging
from typing import Optional
import requests

from apps.rag.search.main import SearchResult, get_filtered_results
from config import SRC_LOG_LEVELS
import requests
from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -1,9 +1,8 @@
import logging

import requests

from apps.rag.search.main import SearchResult
from config import SRC_LOG_LEVELS
from open_webui.apps.retrieval.web.main import SearchResult
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

backend/open_webui/apps/retrieval/web/testdata/searchapi.json (vendored, new file, +357; file diff suppressed because one or more lines are too long)

backend/open_webui/apps/retrieval/web/utils.py (new file, +97)
@@ -0,0 +1,97 @@
import socket
import urllib.parse
import validators
from typing import Union, Sequence, Iterator

from langchain_community.document_loaders import (
    WebBaseLoader,
)
from langchain_core.documents import Document


from open_webui.constants import ERROR_MESSAGES
from open_webui.config import ENABLE_RAG_LOCAL_WEB_FETCH
from open_webui.env import SRC_LOG_LEVELS

import logging

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def validate_url(url: Union[str, Sequence[str]]):
    if isinstance(url, str):
        if isinstance(validators.url(url), validators.ValidationError):
            raise ValueError(ERROR_MESSAGES.INVALID_URL)
        if not ENABLE_RAG_LOCAL_WEB_FETCH:
            # Local web fetch is disabled, filter out any URLs that resolve to private IP addresses
            parsed_url = urllib.parse.urlparse(url)
            # Get IPv4 and IPv6 addresses
            ipv4_addresses, ipv6_addresses = resolve_hostname(parsed_url.hostname)
            # Check if any of the resolved addresses are private
            # This is technically still vulnerable to DNS rebinding attacks, as we don't control WebBaseLoader
            for ip in ipv4_addresses:
                if validators.ipv4(ip, private=True):
                    raise ValueError(ERROR_MESSAGES.INVALID_URL)
            for ip in ipv6_addresses:
                if validators.ipv6(ip, private=True):
                    raise ValueError(ERROR_MESSAGES.INVALID_URL)
        return True
    elif isinstance(url, Sequence):
        return all(validate_url(u) for u in url)
    else:
        return False


def resolve_hostname(hostname):
    # Get address information
    addr_info = socket.getaddrinfo(hostname, None)

    # Extract IP addresses from address information
    ipv4_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET]
    ipv6_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET6]

    return ipv4_addresses, ipv6_addresses


class SafeWebBaseLoader(WebBaseLoader):
    """WebBaseLoader with enhanced error handling for URLs."""

    def lazy_load(self) -> Iterator[Document]:
        """Lazy load text from the url(s) in web_path with error handling."""
        for path in self.web_paths:
            try:
                soup = self._scrape(path, bs_kwargs=self.bs_kwargs)
                text = soup.get_text(**self.bs_get_text_kwargs)

                # Build metadata
                metadata = {"source": path}
                if title := soup.find("title"):
                    metadata["title"] = title.get_text()
                if description := soup.find("meta", attrs={"name": "description"}):
                    metadata["description"] = description.get(
                        "content", "No description found."
                    )
                if html := soup.find("html"):
                    metadata["language"] = html.get("lang", "No language found.")

                yield Document(page_content=text, metadata=metadata)
            except Exception as e:
                # Log the error and continue with the next URL
                log.error(f"Error loading {path}: {e}")


def get_web_loader(
    url: Union[str, Sequence[str]],
    verify_ssl: bool = True,
    requests_per_second: int = 2,
):
    # Check if the URL is valid
    if not validate_url(url):
        raise ValueError(ERROR_MESSAGES.INVALID_URL)
    return SafeWebBaseLoader(
        url,
        verify_ssl=verify_ssl,
        requests_per_second=requests_per_second,
        continue_on_failure=True,
    )
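Usage sketch for the loader above (the URL is illustrative): validate_url rejects URLs resolving to private IPs when ENABLE_RAG_LOCAL_WEB_FETCH is off, and SafeWebBaseLoader logs and skips failing pages instead of aborting the batch.

loader = get_web_loader("https://example.com", requests_per_second=2)
docs = list(loader.lazy_load())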
backend/open_webui/apps/socket/main.py (new file, +219)
@@ -0,0 +1,219 @@
import asyncio
import socketio
import logging
import sys
import time

from open_webui.apps.webui.models.users import Users
from open_webui.env import (
    ENABLE_WEBSOCKET_SUPPORT,
    WEBSOCKET_MANAGER,
    WEBSOCKET_REDIS_URL,
)
from open_webui.utils.utils import decode_token
from open_webui.apps.socket.utils import RedisDict

from open_webui.env import (
    GLOBAL_LOG_LEVEL,
    SRC_LOG_LEVELS,
)


logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["SOCKET"])


if WEBSOCKET_MANAGER == "redis":
    mgr = socketio.AsyncRedisManager(WEBSOCKET_REDIS_URL)
    sio = socketio.AsyncServer(
        cors_allowed_origins=[],
        async_mode="asgi",
        transports=(
            ["polling", "websocket"] if ENABLE_WEBSOCKET_SUPPORT else ["polling"]
        ),
        allow_upgrades=ENABLE_WEBSOCKET_SUPPORT,
        always_connect=True,
        client_manager=mgr,
    )
else:
    sio = socketio.AsyncServer(
        cors_allowed_origins=[],
        async_mode="asgi",
        transports=(
            ["polling", "websocket"] if ENABLE_WEBSOCKET_SUPPORT else ["polling"]
        ),
        allow_upgrades=ENABLE_WEBSOCKET_SUPPORT,
        always_connect=True,
    )


# Dictionary to maintain the user pool

if WEBSOCKET_MANAGER == "redis":
    SESSION_POOL = RedisDict("open-webui:session_pool", redis_url=WEBSOCKET_REDIS_URL)
    USER_POOL = RedisDict("open-webui:user_pool", redis_url=WEBSOCKET_REDIS_URL)
    USAGE_POOL = RedisDict("open-webui:usage_pool", redis_url=WEBSOCKET_REDIS_URL)
else:
    SESSION_POOL = {}
    USER_POOL = {}
    USAGE_POOL = {}


# Timeout duration in seconds
TIMEOUT_DURATION = 3


async def periodic_usage_pool_cleanup():
    while True:
        now = int(time.time())
        for model_id, connections in list(USAGE_POOL.items()):
            # Creating a list of sids to remove if they have timed out
            expired_sids = [
                sid
                for sid, details in connections.items()
                if now - details["updated_at"] > TIMEOUT_DURATION
            ]

            for sid in expired_sids:
                del connections[sid]

            if not connections:
                log.debug(f"Cleaning up model {model_id} from usage pool")
                del USAGE_POOL[model_id]
            else:
                USAGE_POOL[model_id] = connections

        # Emit updated usage information after cleaning
        await sio.emit("usage", {"models": get_models_in_use()})

        await asyncio.sleep(TIMEOUT_DURATION)


app = socketio.ASGIApp(
    sio,
    socketio_path="/ws/socket.io",
)


def get_models_in_use():
    # List models that are currently in use
    models_in_use = list(USAGE_POOL.keys())
    return models_in_use


@sio.on("usage")
async def usage(sid, data):
    model_id = data["model"]
    # Record the timestamp for the last update
    current_time = int(time.time())

    # Store the new usage data and task
    USAGE_POOL[model_id] = {
        **(USAGE_POOL[model_id] if model_id in USAGE_POOL else {}),
        sid: {"updated_at": current_time},
    }

    # Broadcast the usage data to all clients
    await sio.emit("usage", {"models": get_models_in_use()})


@sio.event
async def connect(sid, environ, auth):
    user = None
    if auth and "token" in auth:
        data = decode_token(auth["token"])

        if data is not None and "id" in data:
            user = Users.get_user_by_id(data["id"])

        if user:
            SESSION_POOL[sid] = user.id
            if user.id in USER_POOL:
                USER_POOL[user.id].append(sid)
            else:
                USER_POOL[user.id] = [sid]

            # print(f"user {user.name}({user.id}) connected with session ID {sid}")
            await sio.emit("user-count", {"count": len(USER_POOL.items())})
            await sio.emit("usage", {"models": get_models_in_use()})


@sio.on("user-join")
async def user_join(sid, data):
    # print("user-join", sid, data)

    auth = data["auth"] if "auth" in data else None
    if not auth or "token" not in auth:
        return

    data = decode_token(auth["token"])
    if data is None or "id" not in data:
        return

    user = Users.get_user_by_id(data["id"])
    if not user:
        return

    SESSION_POOL[sid] = user.id
    if user.id in USER_POOL:
        USER_POOL[user.id].append(sid)
    else:
        USER_POOL[user.id] = [sid]

    # print(f"user {user.name}({user.id}) connected with session ID {sid}")

    await sio.emit("user-count", {"count": len(USER_POOL.items())})


@sio.on("user-count")
async def user_count(sid):
    await sio.emit("user-count", {"count": len(USER_POOL.items())})


@sio.event
async def disconnect(sid):
    if sid in SESSION_POOL:
        user_id = SESSION_POOL[sid]
        del SESSION_POOL[sid]

        USER_POOL[user_id] = [_sid for _sid in USER_POOL[user_id] if _sid != sid]

        if len(USER_POOL[user_id]) == 0:
            del USER_POOL[user_id]

        await sio.emit("user-count", {"count": len(USER_POOL)})
    else:
        pass
        # print(f"Unknown session ID {sid} disconnected")


def get_event_emitter(request_info):
    async def __event_emitter__(event_data):
        await sio.emit(
            "chat-events",
            {
                "chat_id": request_info["chat_id"],
                "message_id": request_info["message_id"],
                "data": event_data,
            },
            to=request_info["session_id"],
        )

    return __event_emitter__


def get_event_call(request_info):
    async def __event_call__(event_data):
        response = await sio.call(
            "chat-events",
            {
                "chat_id": request_info["chat_id"],
                "message_id": request_info["message_id"],
                "data": event_data,
            },
            to=request_info["session_id"],
        )
        return response

    return __event_call__
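Sketch of how a caller wires the emitter above into a chat request (the ids are placeholders; this must run inside an async context):

event_emitter = get_event_emitter(
    {"chat_id": "chat-1", "message_id": "msg-1", "session_id": "sid-1"}
)
await event_emitter({"type": "status", "data": {"description": "Searching..."}})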
backend/open_webui/apps/socket/utils.py (new file, +59)
@@ -0,0 +1,59 @@
import json
import redis


class RedisDict:
    def __init__(self, name, redis_url):
        self.name = name
        self.redis = redis.Redis.from_url(redis_url, decode_responses=True)

    def __setitem__(self, key, value):
        serialized_value = json.dumps(value)
        self.redis.hset(self.name, key, serialized_value)

    def __getitem__(self, key):
        value = self.redis.hget(self.name, key)
        if value is None:
            raise KeyError(key)
        return json.loads(value)

    def __delitem__(self, key):
        result = self.redis.hdel(self.name, key)
        if result == 0:
            raise KeyError(key)

    def __contains__(self, key):
        return self.redis.hexists(self.name, key)

    def __len__(self):
        return self.redis.hlen(self.name)

    def keys(self):
        return self.redis.hkeys(self.name)

    def values(self):
        return [json.loads(v) for v in self.redis.hvals(self.name)]

    def items(self):
        return [(k, json.loads(v)) for k, v in self.redis.hgetall(self.name).items()]

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def clear(self):
        self.redis.delete(self.name)

    def update(self, other=None, **kwargs):
        if other is not None:
            for k, v in other.items() if hasattr(other, "items") else other:
                self[k] = v
        for k, v in kwargs.items():
            self[k] = v

    def setdefault(self, key, default=None):
        if key not in self:
            self[key] = default
        return self[key]
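RedisDict keeps the plain-dict interface used by the pools above while persisting each pool as a single Redis hash, so multiple workers share state. Minimal sketch (the redis URL is a placeholder):

pool = RedisDict("open-webui:session_pool", redis_url="redis://localhost:6379/0")
pool["sid-1"] = "user-1"   # HSET with a JSON-serialized value
print(pool.get("sid-1"))   # "user-1"
del pool["sid-1"]          # HDEL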
@@ -1,20 +1,25 @@
import os
import logging
import json
import logging
from contextlib import contextmanager
from typing import Any, Optional

from open_webui.apps.webui.internal.wrappers import register_connection
from open_webui.env import (
    OPEN_WEBUI_DIR,
    DATABASE_URL,
    SRC_LOG_LEVELS,
    DATABASE_POOL_MAX_OVERFLOW,
    DATABASE_POOL_RECYCLE,
    DATABASE_POOL_SIZE,
    DATABASE_POOL_TIMEOUT,
)
from peewee_migrate import Router
from apps.webui.internal.wrappers import register_connection

from typing import Optional, Any
from typing_extensions import Self

from sqlalchemy import create_engine, types, Dialect
from sqlalchemy import Dialect, create_engine, types
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import QueuePool, NullPool
from sqlalchemy.sql.type_api import _T

from config import SRC_LOG_LEVELS, DATA_DIR, DATABASE_URL, BACKEND_DIR
from typing_extensions import Self

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["DB"])
@@ -42,34 +47,21 @@ class JSONField(types.TypeDecorator):
        return json.loads(value)


# Check if the file exists
if os.path.exists(f"{DATA_DIR}/ollama.db"):
    # Rename the file
    os.rename(f"{DATA_DIR}/ollama.db", f"{DATA_DIR}/webui.db")
    log.info("Database migrated from Ollama-WebUI successfully.")
else:
    pass


# Workaround to handle the peewee migration
# This is required to ensure the peewee migration is handled before the alembic migration
def handle_peewee_migration(DATABASE_URL):
    # db = None
    try:
        # Replace the postgresql:// with postgres:// and %40 with @ in the DATABASE_URL
        db = register_connection(
            DATABASE_URL.replace("postgresql://", "postgres://").replace("%40", "@")
        )
        migrate_dir = BACKEND_DIR / "apps" / "webui" / "internal" / "migrations"
        # Replace the postgresql:// with postgres:// to handle the peewee migration
        db = register_connection(DATABASE_URL.replace("postgresql://", "postgres://"))
        migrate_dir = OPEN_WEBUI_DIR / "apps" / "webui" / "internal" / "migrations"
        router = Router(db, logger=log, migrate_dir=migrate_dir)
        router.run()
        db.close()

    # check if db connection has been closed

    except Exception as e:
        log.error(f"Failed to initialize the database connection: {e}")
        raise

    finally:
        # Properly closing the database connection
        if db and not db.is_closed():
@@ -88,7 +80,20 @@ if "sqlite" in SQLALCHEMY_DATABASE_URL:
        SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
    )
else:
    engine = create_engine(SQLALCHEMY_DATABASE_URL, pool_pre_ping=True)
    if DATABASE_POOL_SIZE > 0:
        engine = create_engine(
            SQLALCHEMY_DATABASE_URL,
            pool_size=DATABASE_POOL_SIZE,
            max_overflow=DATABASE_POOL_MAX_OVERFLOW,
            pool_timeout=DATABASE_POOL_TIMEOUT,
            pool_recycle=DATABASE_POOL_RECYCLE,
            pool_pre_ping=True,
            poolclass=QueuePool,
        )
    else:
        engine = create_engine(
            SQLALCHEMY_DATABASE_URL, pool_pre_ping=True, poolclass=NullPool
        )


SessionLocal = sessionmaker(
@@ -98,7 +103,6 @@ Base = declarative_base()
Session = scoped_session(SessionLocal)


# Dependency
def get_session():
    db = SessionLocal()
    try:
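A standalone sketch of the pooling branch introduced above, assuming the URL and pool values come from the env settings imported at the top (the concrete numbers here are illustrative, not the project defaults):

# Sketch only: mirrors the DATABASE_POOL_SIZE branch above.
from sqlalchemy import create_engine
from sqlalchemy.pool import NullPool, QueuePool

def build_engine(url: str, pool_size: int = 0):
    if pool_size > 0:
        # Persistent pool; pool_pre_ping discards stale connections before reuse.
        return create_engine(
            url,
            pool_size=pool_size,      # steady-state connections
            max_overflow=5,           # extra connections allowed under burst load
            pool_timeout=30,          # seconds to wait for a free connection
            pool_recycle=1800,        # recycle connections older than 30 minutes
            pool_pre_ping=True,
            poolclass=QueuePool,
        )
    # NullPool opens and closes a connection per checkout -- no shared pool state,
    # which is the safer default for multi-worker deployments.
    return create_engine(url, pool_pre_ping=True, poolclass=NullPool)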
@@ -30,7 +30,7 @@ import peewee as pw
from peewee_migrate import Migrator
import json

from utils.misc import parse_ollama_modelfile
from open_webui.utils.misc import parse_ollama_modelfile

with suppress(ImportError):
    import playhouse.postgres_ext as pw_pext
@@ -1,13 +1,13 @@
from contextvars import ContextVar
from peewee import *
from peewee import PostgresqlDatabase, InterfaceError as PeeWeeInterfaceError

import logging
from contextvars import ContextVar

from open_webui.env import SRC_LOG_LEVELS
from peewee import *
from peewee import InterfaceError as PeeWeeInterfaceError
from peewee import PostgresqlDatabase
from playhouse.db_url import connect, parse
from playhouse.shortcuts import ReconnectMixin

from config import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["DB"])

@@ -43,7 +43,7 @@ class ReconnectingPostgresqlDatabase(CustomReconnectMixin, PostgresqlDatabase):


def register_connection(db_url):
    db = connect(db_url)
    db = connect(db_url, unquote_password=True)
    if isinstance(db, PostgresqlDatabase):
        # Enable autoconnect for SQLite databases, managed by Peewee
        db.autoconnect = True
@@ -51,16 +51,10 @@ def register_connection(db_url):
        log.info("Connected to PostgreSQL database")

        # Get the connection details
        connection = parse(db_url)
        connection = parse(db_url, unquote_password=True)

        # Use our custom database class that supports reconnection
        db = ReconnectingPostgresqlDatabase(
            connection["database"],
            user=connection["user"],
            password=connection["password"],
            host=connection["host"],
            port=connection["port"],
        )
        db = ReconnectingPostgresqlDatabase(**connection)
        db.connect(reuse_if_open=True)
    elif isinstance(db, SqliteDatabase):
        # Enable autoconnect for SQLite databases, managed by Peewee
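The practical effect of unquote_password=True: URL-encoded characters in the DSN password are decoded by playhouse itself, replacing the earlier hand-rolled .replace("%40", "@") workaround. A small illustration (credentials made up):

from playhouse.db_url import parse

# A password "p@ss" must appear URL-encoded as "p%40ss" inside a DSN.
conn = parse("postgres://user:p%40ss@localhost:5432/webui", unquote_password=True)
print(conn["password"])  # "p@ss" -- decoded once, no manual string surgery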
@@ -1,65 +1,80 @@
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from fastapi.middleware.cors import CORSMiddleware
from apps.webui.routers import (
import inspect
import json
import logging
import time
from typing import AsyncGenerator, Generator, Iterator

from open_webui.apps.socket.main import get_event_call, get_event_emitter
from open_webui.apps.webui.models.functions import Functions
from open_webui.apps.webui.models.models import Models
from open_webui.apps.webui.routers import (
    auths,
    users,
    chats,
    documents,
    tools,
    models,
    prompts,
    folders,
    configs,
    memories,
    utils,
    files,
    functions,
    memories,
    models,
    knowledge,
    prompts,
    evaluations,
    tools,
    users,
    utils,
)
from apps.webui.models.functions import Functions
from apps.webui.models.models import Models
from apps.webui.utils import load_function_module_by_id

from utils.misc import (
from open_webui.apps.webui.utils import load_function_module_by_id
from open_webui.config import (
    ADMIN_EMAIL,
    CORS_ALLOW_ORIGIN,
    DEFAULT_MODELS,
    DEFAULT_PROMPT_SUGGESTIONS,
    DEFAULT_USER_ROLE,
    ENABLE_COMMUNITY_SHARING,
    ENABLE_LOGIN_FORM,
    ENABLE_MESSAGE_RATING,
    ENABLE_SIGNUP,
    ENABLE_EVALUATION_ARENA_MODELS,
    EVALUATION_ARENA_MODELS,
    DEFAULT_ARENA_MODEL,
    JWT_EXPIRES_IN,
    ENABLE_OAUTH_ROLE_MANAGEMENT,
    OAUTH_ROLES_CLAIM,
    OAUTH_EMAIL_CLAIM,
    OAUTH_PICTURE_CLAIM,
    OAUTH_USERNAME_CLAIM,
    OAUTH_ALLOWED_ROLES,
    OAUTH_ADMIN_ROLES,
    SHOW_ADMIN_DETAILS,
    USER_PERMISSIONS,
    WEBHOOK_URL,
    WEBUI_AUTH,
    WEBUI_BANNERS,
    AppConfig,
)
from open_webui.env import (
    WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
    WEBUI_AUTH_TRUSTED_NAME_HEADER,
)
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from open_webui.utils.misc import (
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)
from open_webui.utils.payload import (
    apply_model_params_to_body_openai,
    apply_model_system_prompt_to_body,
)


from config import (
    SHOW_ADMIN_DETAILS,
    ADMIN_EMAIL,
    WEBUI_AUTH,
    DEFAULT_MODELS,
    DEFAULT_PROMPT_SUGGESTIONS,
    DEFAULT_USER_ROLE,
    ENABLE_SIGNUP,
    ENABLE_LOGIN_FORM,
    USER_PERMISSIONS,
    WEBHOOK_URL,
    WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
    WEBUI_AUTH_TRUSTED_NAME_HEADER,
    JWT_EXPIRES_IN,
    WEBUI_BANNERS,
    ENABLE_COMMUNITY_SHARING,
    AppConfig,
    OAUTH_USERNAME_CLAIM,
    OAUTH_PICTURE_CLAIM,
    OAUTH_EMAIL_CLAIM,
)

from apps.socket.main import get_event_call, get_event_emitter

import inspect
import json

from typing import Iterator, Generator, AsyncGenerator
from pydantic import BaseModel
from open_webui.utils.tools import get_tools

app = FastAPI()

origins = ["*"]
log = logging.getLogger(__name__)

app.state.config = AppConfig()
@@ -82,18 +97,27 @@ app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.config.BANNERS = WEBUI_BANNERS

app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING
app.state.config.ENABLE_MESSAGE_RATING = ENABLE_MESSAGE_RATING

app.state.config.ENABLE_EVALUATION_ARENA_MODELS = ENABLE_EVALUATION_ARENA_MODELS
app.state.config.EVALUATION_ARENA_MODELS = EVALUATION_ARENA_MODELS

app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM
app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM
app.state.config.OAUTH_EMAIL_CLAIM = OAUTH_EMAIL_CLAIM

app.state.config.ENABLE_OAUTH_ROLE_MANAGEMENT = ENABLE_OAUTH_ROLE_MANAGEMENT
app.state.config.OAUTH_ROLES_CLAIM = OAUTH_ROLES_CLAIM
app.state.config.OAUTH_ALLOWED_ROLES = OAUTH_ALLOWED_ROLES
app.state.config.OAUTH_ADMIN_ROLES = OAUTH_ADMIN_ROLES

app.state.MODELS = {}
app.state.TOOLS = {}
app.state.FUNCTIONS = {}

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_origins=CORS_ALLOW_ORIGIN,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
@@ -101,19 +125,24 @@ app.add_middleware(


app.include_router(configs.router, prefix="/configs", tags=["configs"])

app.include_router(auths.router, prefix="/auths", tags=["auths"])
app.include_router(users.router, prefix="/users", tags=["users"])

app.include_router(chats.router, prefix="/chats", tags=["chats"])

app.include_router(documents.router, prefix="/documents", tags=["documents"])
app.include_router(models.router, prefix="/models", tags=["models"])
app.include_router(knowledge.router, prefix="/knowledge", tags=["knowledge"])
app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])

app.include_router(memories.router, prefix="/memories", tags=["memories"])
app.include_router(files.router, prefix="/files", tags=["files"])
app.include_router(tools.router, prefix="/tools", tags=["tools"])
app.include_router(functions.router, prefix="/functions", tags=["functions"])

app.include_router(memories.router, prefix="/memories", tags=["memories"])
app.include_router(evaluations.router, prefix="/evaluations", tags=["evaluations"])

app.include_router(folders.router, prefix="/folders", tags=["folders"])
app.include_router(files.router, prefix="/files", tags=["files"])

app.include_router(utils.router, prefix="/utils", tags=["utils"])
@@ -127,6 +156,47 @@ async def get_status():
    }


async def get_all_models():
    models = []
    pipe_models = await get_pipe_models()
    models = models + pipe_models

    if app.state.config.ENABLE_EVALUATION_ARENA_MODELS:
        arena_models = []
        if len(app.state.config.EVALUATION_ARENA_MODELS) > 0:
            arena_models = [
                {
                    "id": model["id"],
                    "name": model["name"],
                    "info": {
                        "meta": model["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
                for model in app.state.config.EVALUATION_ARENA_MODELS
            ]
        else:
            # Add default arena model
            arena_models = [
                {
                    "id": DEFAULT_ARENA_MODEL["id"],
                    "name": DEFAULT_ARENA_MODEL["name"],
                    "info": {
                        "meta": DEFAULT_ARENA_MODEL["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
            ]
        models = models + arena_models
    return models
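Each configured entry in EVALUATION_ARENA_MODELS is read only for its id, name, and meta keys, so an arena model entry presumably looks something like the sketch below; the field values, including the meta contents, are illustrative assumptions.

# Illustrative arena entry; only "id", "name", and "meta" are consumed above.
arena_model = {
    "id": "arena-blind-ab",
    "name": "Arena: blind A/B",
    "meta": {"description": "Blind side-by-side evaluation"},
}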
def get_function_module(pipe_id: str):
    # Check if function is already loaded
    if pipe_id not in app.state.FUNCTIONS:
@@ -150,29 +220,33 @@ async def get_pipe_models():

        # Check if function is a manifold
        if hasattr(function_module, "pipes"):
            manifold_pipes = []
            sub_pipes = []

            # Check if pipes is a function or a list
            if callable(function_module.pipes):
                manifold_pipes = function_module.pipes()
            else:
                manifold_pipes = function_module.pipes

            for p in manifold_pipes:
                manifold_pipe_id = f'{pipe.id}.{p["id"]}'
                manifold_pipe_name = p["name"]
            try:
                if callable(function_module.pipes):
                    sub_pipes = function_module.pipes()
                else:
                    sub_pipes = function_module.pipes
            except Exception as e:
                log.exception(e)
                sub_pipes = []

            print(sub_pipes)

            for p in sub_pipes:
                sub_pipe_id = f'{pipe.id}.{p["id"]}'
                sub_pipe_name = p["name"]

                if hasattr(function_module, "name"):
                    manifold_pipe_name = f"{function_module.name}{manifold_pipe_name}"
                    sub_pipe_name = f"{function_module.name}{sub_pipe_name}"

                pipe_flag = {"type": pipe.type}
                if hasattr(function_module, "ChatValves"):
                    pipe_flag["valves_spec"] = function_module.ChatValves.schema()

                pipe_models.append(
                    {
                        "id": manifold_pipe_id,
                        "name": manifold_pipe_name,
                        "id": sub_pipe_id,
                        "name": sub_pipe_name,
                        "object": "model",
                        "created": pipe.created_at,
                        "owned_by": "openai",
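For context, a manifold pipe module exposes a pipes attribute, either a plain list or a zero-argument callable, whose entries carry "id" and "name"; each entry becomes a model with the id "<function_id>.<sub_id>". A minimal sketch of such a module (names are hypothetical):

# Minimal manifold sketch; matches the attribute access in the loop above.
class Pipe:
    name = "demo/"  # optional; prepended to each sub-pipe name when present

    def pipes(self):
        # Could equally be a plain list attribute instead of a method.
        return [
            {"id": "small", "name": "small-model"},
            {"id": "large", "name": "large-model"},
        ]

    def pipe(self, body: dict):
        # body["model"] arrives as "<function_id>.small" or "<function_id>.large"
        return f"echo from {body.get('model')}"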
@@ -181,8 +255,6 @@ async def get_pipe_models():
                )
        else:
            pipe_flag = {"type": "pipe"}
            if hasattr(function_module, "ChatValves"):
                pipe_flag["valves_spec"] = function_module.ChatValves.schema()

            pipe_models.append(
                {
@@ -241,50 +313,77 @@ def get_pipe_id(form_data: dict) -> str:
    return pipe_id


def get_function_params(function_module, form_data, user, extra_params={}):
def get_function_params(function_module, form_data, user, extra_params=None):
    if extra_params is None:
        extra_params = {}

    pipe_id = get_pipe_id(form_data)

    # Get the signature of the function
    sig = inspect.signature(function_module.pipe)
    params = {"body": form_data}

    for key, value in extra_params.items():
        if key in sig.parameters:
            params[key] = value

    if "__user__" in sig.parameters:
        __user__ = {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        }
    params = {"body": form_data} | {
        k: v for k, v in extra_params.items() if k in sig.parameters
    }

    if "__user__" in params and hasattr(function_module, "UserValves"):
        user_valves = Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
        try:
            if hasattr(function_module, "UserValves"):
                __user__["valves"] = function_module.UserValves(
                    **Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
                )
            params["__user__"]["valves"] = function_module.UserValves(**user_valves)
        except Exception as e:
            print(e)
            log.exception(e)
            params["__user__"]["valves"] = function_module.UserValves()

        params["__user__"] = __user__
    return params


async def generate_function_chat_completion(form_data, user):
    model_id = form_data.get("model")
    model_info = Models.get_model_by_id(model_id)
    metadata = form_data.pop("metadata", None)

    metadata = form_data.pop("metadata", {})

    files = metadata.get("files", [])
    tool_ids = metadata.get("tool_ids", [])
    # Check if tool_ids is None
    if tool_ids is None:
        tool_ids = []

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None
    __task_body__ = None

    if metadata:
        if all(k in metadata for k in ("session_id", "chat_id", "message_id")):
            __event_emitter__ = get_event_emitter(metadata)
            __event_call__ = get_event_call(metadata)
        __task__ = metadata.get("task", None)
        __task_body__ = metadata.get("task_body", None)

    extra_params = {
        "__event_emitter__": __event_emitter__,
        "__event_call__": __event_call__,
        "__task__": __task__,
        "__task_body__": __task_body__,
        "__files__": files,
        "__user__": {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        },
    }
    extra_params["__tools__"] = get_tools(
        app,
        tool_ids,
        user,
        {
            **extra_params,
            "__model__": app.state.MODELS[form_data["model"]],
            "__messages__": form_data["messages"],
            "__files__": files,
        },
    )

    if model_info:
        if model_info.base_model_id:
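The core of the new get_function_params is the signature filter: only extra params whose names appear in the pipe's signature are forwarded, so a pipe opts in to exactly the context it needs. A self-contained sketch of that behavior with a hypothetical pipe:

import inspect

# Hypothetical pipe that only asks for the body and the user context.
def pipe(body, __user__):
    return __user__["role"], body["model"]

sig = inspect.signature(pipe)
extra = {"__user__": {"role": "admin"}, "__event_emitter__": None, "__task__": None}
params = {"body": {"model": "demo"}} | {
    k: v for k, v in extra.items() if k in sig.parameters
}
print(params.keys())  # dict_keys(['body', '__user__']) -- unused params dropped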
@@ -298,18 +397,9 @@ async def generate_function_chat_completion(form_data, user):
    function_module = get_function_module(pipe_id)

    pipe = function_module.pipe
    params = get_function_params(
        function_module,
        form_data,
        user,
        {
            "__event_emitter__": __event_emitter__,
            "__event_call__": __event_call__,
            "__task__": __task__,
        },
    )
    params = get_function_params(function_module, form_data, user, extra_params)

    if form_data["stream"]:
    if form_data.get("stream", False):

        async def stream_content():
            try:
@@ -1,15 +1,13 @@
from pydantic import BaseModel
from typing import Optional
import uuid
import logging
from sqlalchemy import String, Column, Boolean, Text
import uuid
from typing import Optional

from apps.webui.models.users import UserModel, Users
from utils.utils import verify_password

from apps.webui.internal.db import Base, get_db

from config import SRC_LOG_LEVELS
from open_webui.apps.webui.internal.db import Base, get_db
from open_webui.apps.webui.models.users import UserModel, Users
from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel
from sqlalchemy import Boolean, Column, String, Text
from open_webui.utils.utils import verify_password

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])
@@ -92,7 +90,6 @@ class AddUserForm(SignupForm):


class AuthsTable:

    def insert_new_auth(
        self,
        email: str,
@@ -103,7 +100,6 @@ class AuthsTable:
        oauth_sub: Optional[str] = None,
    ) -> Optional[UserModel]:
        with get_db() as db:

            log.info("insert_new_auth")

            id = str(uuid.uuid4())
@@ -130,7 +126,6 @@ class AuthsTable:
        log.info(f"authenticate_user: {email}")
        try:
            with get_db() as db:

                auth = db.query(Auth).filter_by(email=email, active=True).first()
                if auth:
                    if verify_password(password, auth.password):
@@ -189,7 +184,6 @@ class AuthsTable:
    def delete_auth_by_id(self, id: str) -> bool:
        try:
            with get_db() as db:

                # Delete User
                result = Users.delete_user_by_id(id)
backend/open_webui/apps/webui/models/chats.py (new file, 805 lines)
@@ -0,0 +1,805 @@
import json
import time
import uuid
from typing import Optional

from open_webui.apps.webui.internal.db import Base, get_db
from open_webui.apps.webui.models.tags import TagModel, Tag, Tags


from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Boolean, Column, String, Text, JSON
from sqlalchemy import or_, func, select, and_, text
from sqlalchemy.sql import exists

####################
# Chat DB Schema
####################


class Chat(Base):
    __tablename__ = "chat"

    id = Column(String, primary_key=True)
    user_id = Column(String)
    title = Column(Text)
    chat = Column(JSON)

    created_at = Column(BigInteger)
    updated_at = Column(BigInteger)

    share_id = Column(Text, unique=True, nullable=True)
    archived = Column(Boolean, default=False)
    pinned = Column(Boolean, default=False, nullable=True)

    meta = Column(JSON, server_default="{}")
    folder_id = Column(Text, nullable=True)


class ChatModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: str
    user_id: str
    title: str
    chat: dict

    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch

    share_id: Optional[str] = None
    archived: bool = False
    pinned: Optional[bool] = False

    meta: dict = {}
    folder_id: Optional[str] = None


####################
# Forms
####################


class ChatForm(BaseModel):
    chat: dict


class ChatImportForm(ChatForm):
    meta: Optional[dict] = {}
    pinned: Optional[bool] = False
    folder_id: Optional[str] = None


class ChatTitleMessagesForm(BaseModel):
    title: str
    messages: list[dict]


class ChatTitleForm(BaseModel):
    title: str


class ChatResponse(BaseModel):
    id: str
    user_id: str
    title: str
    chat: dict
    updated_at: int  # timestamp in epoch
    created_at: int  # timestamp in epoch
    share_id: Optional[str] = None  # id of the chat to be shared
    archived: bool
    pinned: Optional[bool] = False
    meta: dict = {}
    folder_id: Optional[str] = None


class ChatTitleIdResponse(BaseModel):
    id: str
    title: str
    updated_at: int
    created_at: int
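For reference, an import payload against ChatImportForm above might look like the following; the values are illustrative, and meta.tags is the same list the tag search further down reads.

# Illustrative ChatImportForm payload.
payload = {
    "chat": {"title": "Imported chat", "messages": []},
    "meta": {"tags": ["work"]},
    "pinned": False,
    "folder_id": None,
}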
class ChatTable:
    def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]:
        with get_db() as db:
            id = str(uuid.uuid4())
            chat = ChatModel(
                **{
                    "id": id,
                    "user_id": user_id,
                    "title": (
                        form_data.chat["title"]
                        if "title" in form_data.chat
                        else "New Chat"
                    ),
                    "chat": form_data.chat,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )

            result = Chat(**chat.model_dump())
            db.add(result)
            db.commit()
            db.refresh(result)
            return ChatModel.model_validate(result) if result else None

    def import_chat(
        self, user_id: str, form_data: ChatImportForm
    ) -> Optional[ChatModel]:
        with get_db() as db:
            id = str(uuid.uuid4())
            chat = ChatModel(
                **{
                    "id": id,
                    "user_id": user_id,
                    "title": (
                        form_data.chat["title"]
                        if "title" in form_data.chat
                        else "New Chat"
                    ),
                    "chat": form_data.chat,
                    "meta": form_data.meta,
                    "pinned": form_data.pinned,
                    "folder_id": form_data.folder_id,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )

            result = Chat(**chat.model_dump())
            db.add(result)
            db.commit()
            db.refresh(result)
            return ChatModel.model_validate(result) if result else None

    def update_chat_by_id(self, id: str, chat: dict) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat_item = db.get(Chat, id)
                chat_item.chat = chat
                chat_item.title = chat["title"] if "title" in chat else "New Chat"
                chat_item.updated_at = int(time.time())
                db.commit()
                db.refresh(chat_item)

                return ChatModel.model_validate(chat_item)
        except Exception:
            return None

    def insert_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]:
        with get_db() as db:
            # Get the existing chat to share
            chat = db.get(Chat, chat_id)
            # Check if the chat is already shared
            if chat.share_id:
                return self.get_chat_by_id_and_user_id(chat.share_id, "shared")
            # Create a new chat with the same data, but with a new ID
            shared_chat = ChatModel(
                **{
                    "id": str(uuid.uuid4()),
                    "user_id": f"shared-{chat_id}",
                    "title": chat.title,
                    "chat": chat.chat,
                    "created_at": chat.created_at,
                    "updated_at": int(time.time()),
                }
            )
            shared_result = Chat(**shared_chat.model_dump())
            db.add(shared_result)
            db.commit()
            db.refresh(shared_result)

            # Update the original chat with the share_id
            result = (
                db.query(Chat)
                .filter_by(id=chat_id)
                .update({"share_id": shared_chat.id})
            )
            db.commit()
            return shared_chat if (shared_result and result) else None

    def update_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                print("update_shared_chat_by_id")
                chat = db.get(Chat, chat_id)
                print(chat)
                chat.title = chat.title
                chat.chat = chat.chat
                db.commit()
                db.refresh(chat)

                return self.get_chat_by_id(chat.share_id)
        except Exception:
            return None

    def delete_shared_chat_by_chat_id(self, chat_id: str) -> bool:
        try:
            with get_db() as db:
                db.query(Chat).filter_by(user_id=f"shared-{chat_id}").delete()
                db.commit()

                return True
        except Exception:
            return False

    def update_chat_share_id_by_id(
        self, id: str, share_id: Optional[str]
    ) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat = db.get(Chat, id)
                chat.share_id = share_id
                db.commit()
                db.refresh(chat)
                return ChatModel.model_validate(chat)
        except Exception:
            return None

    def toggle_chat_pinned_by_id(self, id: str) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat = db.get(Chat, id)
                chat.pinned = not chat.pinned
                chat.updated_at = int(time.time())
                db.commit()
                db.refresh(chat)
                return ChatModel.model_validate(chat)
        except Exception:
            return None

    def toggle_chat_archive_by_id(self, id: str) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat = db.get(Chat, id)
                chat.archived = not chat.archived
                chat.updated_at = int(time.time())
                db.commit()
                db.refresh(chat)
                return ChatModel.model_validate(chat)
        except Exception:
            return None

    def archive_all_chats_by_user_id(self, user_id: str) -> bool:
        try:
            with get_db() as db:
                db.query(Chat).filter_by(user_id=user_id).update({"archived": True})
                db.commit()
                return True
        except Exception:
            return False

    def get_archived_chat_list_by_user_id(
        self, user_id: str, skip: int = 0, limit: int = 50
    ) -> list[ChatModel]:
        with get_db() as db:
            all_chats = (
                db.query(Chat)
                .filter_by(user_id=user_id, archived=True)
                .order_by(Chat.updated_at.desc())
                # .limit(limit).offset(skip)
                .all()
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_chat_list_by_user_id(
        self,
        user_id: str,
        include_archived: bool = False,
        skip: int = 0,
        limit: int = 50,
    ) -> list[ChatModel]:
        with get_db() as db:
            query = db.query(Chat).filter_by(user_id=user_id).filter_by(folder_id=None)
            if not include_archived:
                query = query.filter_by(archived=False)

            query = query.order_by(Chat.updated_at.desc())

            if skip:
                query = query.offset(skip)
            if limit:
                query = query.limit(limit)

            all_chats = query.all()
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_chat_title_id_list_by_user_id(
        self,
        user_id: str,
        include_archived: bool = False,
        skip: Optional[int] = None,
        limit: Optional[int] = None,
    ) -> list[ChatTitleIdResponse]:
        with get_db() as db:
            query = db.query(Chat).filter_by(user_id=user_id).filter_by(folder_id=None)
            query = query.filter(or_(Chat.pinned == False, Chat.pinned == None))

            if not include_archived:
                query = query.filter_by(archived=False)

            query = query.order_by(Chat.updated_at.desc()).with_entities(
                Chat.id, Chat.title, Chat.updated_at, Chat.created_at
            )

            if skip:
                query = query.offset(skip)
            if limit:
                query = query.limit(limit)

            all_chats = query.all()

            # The result has to be destructured from the sqlalchemy `row` and mapped to a dict, since `ChatModel` is not the returned dataclass.
            return [
                ChatTitleIdResponse.model_validate(
                    {
                        "id": chat[0],
                        "title": chat[1],
                        "updated_at": chat[2],
                        "created_at": chat[3],
                    }
                )
                for chat in all_chats
            ]
    def get_chat_list_by_chat_ids(
        self, chat_ids: list[str], skip: int = 0, limit: int = 50
    ) -> list[ChatModel]:
        with get_db() as db:
            all_chats = (
                db.query(Chat)
                .filter(Chat.id.in_(chat_ids))
                .filter_by(archived=False)
                .order_by(Chat.updated_at.desc())
                .all()
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_chat_by_id(self, id: str) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat = db.get(Chat, id)
                return ChatModel.model_validate(chat)
        except Exception:
            return None

    def get_chat_by_share_id(self, id: str) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat = db.query(Chat).filter_by(share_id=id).first()

                if chat:
                    return self.get_chat_by_id(id)
                else:
                    return None
        except Exception:
            return None

    def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat = db.query(Chat).filter_by(id=id, user_id=user_id).first()
                return ChatModel.model_validate(chat)
        except Exception:
            return None

    def get_chats(self, skip: int = 0, limit: int = 50) -> list[ChatModel]:
        with get_db() as db:
            all_chats = (
                db.query(Chat)
                # .limit(limit).offset(skip)
                .order_by(Chat.updated_at.desc())
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
        with get_db() as db:
            all_chats = (
                db.query(Chat)
                .filter_by(user_id=user_id)
                .order_by(Chat.updated_at.desc())
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_pinned_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
        with get_db() as db:
            all_chats = (
                db.query(Chat)
                .filter_by(user_id=user_id, pinned=True, archived=False)
                .order_by(Chat.updated_at.desc())
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_archived_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
        with get_db() as db:
            all_chats = (
                db.query(Chat)
                .filter_by(user_id=user_id, archived=True)
                .order_by(Chat.updated_at.desc())
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_chats_by_user_id_and_search_text(
        self,
        user_id: str,
        search_text: str,
        include_archived: bool = False,
        skip: int = 0,
        limit: int = 60,
    ) -> list[ChatModel]:
        """
        Filters chats based on a search query using Python, allowing pagination using skip and limit.
        """
        search_text = search_text.lower().strip()

        if not search_text:
            return self.get_chat_list_by_user_id(user_id, include_archived, skip, limit)

        search_text_words = search_text.split(" ")

        # search_text might contain 'tag:tag_name' format so we need to extract the tag_name, split the search_text and remove the tags
        tag_ids = [
            word.replace("tag:", "").replace(" ", "_").lower()
            for word in search_text_words
            if word.startswith("tag:")
        ]

        search_text_words = [
            word for word in search_text_words if not word.startswith("tag:")
        ]

        search_text = " ".join(search_text_words)

        with get_db() as db:
            query = db.query(Chat).filter(Chat.user_id == user_id)

            if not include_archived:
                query = query.filter(Chat.archived == False)

            query = query.order_by(Chat.updated_at.desc())

            # Check if the database dialect is either 'sqlite' or 'postgresql'
            dialect_name = db.bind.dialect.name
            if dialect_name == "sqlite":
                # SQLite case: using JSON1 extension for JSON searching
                query = query.filter(
                    (
                        Chat.title.ilike(
                            f"%{search_text}%"
                        )  # Case-insensitive search in title
                        | text(
                            """
                            EXISTS (
                                SELECT 1
                                FROM json_each(Chat.chat, '$.messages') AS message
                                WHERE LOWER(message.value->>'content') LIKE '%' || :search_text || '%'
                            )
                            """
                        )
                    ).params(search_text=search_text)
                )

                # Check if there are any tags to filter, it should have all the tags
                if "none" in tag_ids:
                    query = query.filter(
                        text(
                            """
                            NOT EXISTS (
                                SELECT 1
                                FROM json_each(Chat.meta, '$.tags') AS tag
                            )
                            """
                        )
                    )
                elif tag_ids:
                    query = query.filter(
                        and_(
                            *[
                                text(
                                    f"""
                                    EXISTS (
                                        SELECT 1
                                        FROM json_each(Chat.meta, '$.tags') AS tag
                                        WHERE tag.value = :tag_id_{tag_idx}
                                    )
                                    """
                                ).params(**{f"tag_id_{tag_idx}": tag_id})
                                for tag_idx, tag_id in enumerate(tag_ids)
                            ]
                        )
                    )

            elif dialect_name == "postgresql":
                # PostgreSQL relies on proper JSON query for search
                query = query.filter(
                    (
                        Chat.title.ilike(
                            f"%{search_text}%"
                        )  # Case-insensitive search in title
                        | text(
                            """
                            EXISTS (
                                SELECT 1
                                FROM json_array_elements(Chat.chat->'messages') AS message
                                WHERE LOWER(message->>'content') LIKE '%' || :search_text || '%'
                            )
                            """
                        )
                    ).params(search_text=search_text)
                )

                # Check if there are any tags to filter, it should have all the tags
                if "none" in tag_ids:
                    query = query.filter(
                        text(
                            """
                            NOT EXISTS (
                                SELECT 1
                                FROM json_array_elements_text(Chat.meta->'tags') AS tag
                            )
                            """
                        )
                    )
                elif tag_ids:
                    query = query.filter(
                        and_(
                            *[
                                text(
                                    f"""
                                    EXISTS (
                                        SELECT 1
                                        FROM json_array_elements_text(Chat.meta->'tags') AS tag
                                        WHERE tag = :tag_id_{tag_idx}
                                    )
                                    """
                                ).params(**{f"tag_id_{tag_idx}": tag_id})
                                for tag_idx, tag_id in enumerate(tag_ids)
                            ]
                        )
                    )
            else:
                raise NotImplementedError(
                    f"Unsupported dialect: {db.bind.dialect.name}"
                )

            # Perform pagination at the SQL level
            all_chats = query.offset(skip).limit(limit).all()

            print(len(all_chats))

            # Validate and return chats
            return [ChatModel.model_validate(chat) for chat in all_chats]
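The search grammar above supports inline tag filters: each token starting with "tag:" is stripped from the free-text part, and a matching chat must carry every listed tag in meta.tags; the special token "tag:none" selects chats with no tags at all. For example (user id illustrative):

# "report tag:work tag:q3":
#   free-text part: "report" (matched case-insensitively in title or message content)
#   tag filter:     chat must carry BOTH "work" AND "q3" in meta.tags
# "tag:none":       only chats whose meta.tags is empty
chats = Chats.get_chats_by_user_id_and_search_text("u1", "report tag:work tag:q3")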
    def get_chats_by_folder_id_and_user_id(
        self, folder_id: str, user_id: str
    ) -> list[ChatModel]:
        with get_db() as db:
            query = db.query(Chat).filter_by(folder_id=folder_id, user_id=user_id)
            query = query.filter(or_(Chat.pinned == False, Chat.pinned == None))
            query = query.filter_by(archived=False)

            query = query.order_by(Chat.updated_at.desc())

            all_chats = query.all()
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_chats_by_folder_ids_and_user_id(
        self, folder_ids: list[str], user_id: str
    ) -> list[ChatModel]:
        with get_db() as db:
            query = db.query(Chat).filter(
                Chat.folder_id.in_(folder_ids), Chat.user_id == user_id
            )
            query = query.filter(or_(Chat.pinned == False, Chat.pinned == None))
            query = query.filter_by(archived=False)

            query = query.order_by(Chat.updated_at.desc())

            all_chats = query.all()
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def update_chat_folder_id_by_id_and_user_id(
        self, id: str, user_id: str, folder_id: str
    ) -> Optional[ChatModel]:
        try:
            with get_db() as db:
                chat = db.get(Chat, id)
                chat.folder_id = folder_id
                chat.updated_at = int(time.time())
                chat.pinned = False
                db.commit()
                db.refresh(chat)
                return ChatModel.model_validate(chat)
        except Exception:
            return None

    def get_chat_tags_by_id_and_user_id(self, id: str, user_id: str) -> list[TagModel]:
        with get_db() as db:
            chat = db.get(Chat, id)
            tags = chat.meta.get("tags", [])
            return [Tags.get_tag_by_name_and_user_id(tag, user_id) for tag in tags]

    def get_chat_list_by_user_id_and_tag_name(
        self, user_id: str, tag_name: str, skip: int = 0, limit: int = 50
    ) -> list[ChatModel]:
        with get_db() as db:
            query = db.query(Chat).filter_by(user_id=user_id)
            tag_id = tag_name.replace(" ", "_").lower()

            print(db.bind.dialect.name)
            if db.bind.dialect.name == "sqlite":
                # SQLite JSON1 querying for tags within the meta JSON field
                query = query.filter(
                    text(
                        f"EXISTS (SELECT 1 FROM json_each(Chat.meta, '$.tags') WHERE json_each.value = :tag_id)"
                    )
                ).params(tag_id=tag_id)
            elif db.bind.dialect.name == "postgresql":
                # PostgreSQL JSON query for tags within the meta JSON field (for `json` type)
                query = query.filter(
                    text(
                        "EXISTS (SELECT 1 FROM json_array_elements_text(Chat.meta->'tags') elem WHERE elem = :tag_id)"
                    )
                ).params(tag_id=tag_id)
            else:
                raise NotImplementedError(
                    f"Unsupported dialect: {db.bind.dialect.name}"
                )

            all_chats = query.all()
            print("all_chats", all_chats)
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def add_chat_tag_by_id_and_user_id_and_tag_name(
        self, id: str, user_id: str, tag_name: str
    ) -> Optional[ChatModel]:
        tag = Tags.get_tag_by_name_and_user_id(tag_name, user_id)
        if tag is None:
            tag = Tags.insert_new_tag(tag_name, user_id)
        try:
            with get_db() as db:
                chat = db.get(Chat, id)

                tag_id = tag.id
                if tag_id not in chat.meta.get("tags", []):
                    chat.meta = {
                        **chat.meta,
                        "tags": list(set(chat.meta.get("tags", []) + [tag_id])),
                    }

                db.commit()
                db.refresh(chat)
                return ChatModel.model_validate(chat)
        except Exception:
            return None

    def count_chats_by_tag_name_and_user_id(self, tag_name: str, user_id: str) -> int:
        with get_db() as db:  # Assuming `get_db()` returns a session object
            query = db.query(Chat).filter_by(user_id=user_id, archived=False)

            # Normalize the tag_name for consistency
            tag_id = tag_name.replace(" ", "_").lower()

            if db.bind.dialect.name == "sqlite":
                # SQLite JSON1 support for querying the tags inside the `meta` JSON field
                query = query.filter(
                    text(
                        f"EXISTS (SELECT 1 FROM json_each(Chat.meta, '$.tags') WHERE json_each.value = :tag_id)"
                    )
                ).params(tag_id=tag_id)

            elif db.bind.dialect.name == "postgresql":
                # PostgreSQL JSONB support for querying the tags inside the `meta` JSON field
                query = query.filter(
                    text(
                        "EXISTS (SELECT 1 FROM json_array_elements_text(Chat.meta->'tags') elem WHERE elem = :tag_id)"
                    )
                ).params(tag_id=tag_id)

            else:
                raise NotImplementedError(
                    f"Unsupported dialect: {db.bind.dialect.name}"
                )

            # Get the count of matching records
            count = query.count()

            # Debugging output for inspection
            print(f"Count of chats for tag '{tag_name}':", count)

            return count

    def delete_tag_by_id_and_user_id_and_tag_name(
        self, id: str, user_id: str, tag_name: str
    ) -> bool:
        try:
            with get_db() as db:
                chat = db.get(Chat, id)
                tags = chat.meta.get("tags", [])
                tag_id = tag_name.replace(" ", "_").lower()

                tags = [tag for tag in tags if tag != tag_id]
                chat.meta = {
                    **chat.meta,
                    "tags": list(set(tags)),
                }
                db.commit()
                return True
        except Exception:
            return False

    def delete_all_tags_by_id_and_user_id(self, id: str, user_id: str) -> bool:
        try:
            with get_db() as db:
                chat = db.get(Chat, id)
                chat.meta = {
                    **chat.meta,
                    "tags": [],
                }
                db.commit()

                return True
        except Exception:
            return False

    def delete_chat_by_id(self, id: str) -> bool:
        try:
            with get_db() as db:
                db.query(Chat).filter_by(id=id).delete()
                db.commit()

                return True and self.delete_shared_chat_by_chat_id(id)
        except Exception:
            return False

    def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
        try:
            with get_db() as db:
                db.query(Chat).filter_by(id=id, user_id=user_id).delete()
                db.commit()

                return True and self.delete_shared_chat_by_chat_id(id)
        except Exception:
            return False

    def delete_chats_by_user_id(self, user_id: str) -> bool:
        try:
            with get_db() as db:
                self.delete_shared_chats_by_user_id(user_id)

                db.query(Chat).filter_by(user_id=user_id).delete()
                db.commit()

                return True
        except Exception:
            return False

    def delete_chats_by_user_id_and_folder_id(
        self, user_id: str, folder_id: str
    ) -> bool:
        try:
            with get_db() as db:
                db.query(Chat).filter_by(user_id=user_id, folder_id=folder_id).delete()
                db.commit()

                return True
        except Exception:
            return False

    def delete_shared_chats_by_user_id(self, user_id: str) -> bool:
        try:
            with get_db() as db:
                chats_by_user = db.query(Chat).filter_by(user_id=user_id).all()
                shared_chat_ids = [f"shared-{chat.id}" for chat in chats_by_user]

                db.query(Chat).filter(Chat.user_id.in_(shared_chat_ids)).delete()
                db.commit()

                return True
        except Exception:
            return False


Chats = ChatTable()
@@ -1,15 +1,12 @@
from pydantic import BaseModel, ConfigDict
from typing import Optional
import time
import logging

from sqlalchemy import String, Column, BigInteger, Text

from apps.webui.internal.db import Base, get_db

import json
import logging
import time
from typing import Optional

from config import SRC_LOG_LEVELS
from open_webui.apps.webui.internal.db import Base, get_db
from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])
@@ -70,12 +67,10 @@ class DocumentForm(DocumentUpdateForm):


class DocumentsTable:

    def insert_new_doc(
        self, user_id: str, form_data: DocumentForm
    ) -> Optional[DocumentModel]:
        with get_db() as db:

            document = DocumentModel(
                **{
                    **form_data.model_dump(),
@@ -99,7 +94,6 @@ class DocumentsTable:
    def get_doc_by_name(self, name: str) -> Optional[DocumentModel]:
        try:
            with get_db() as db:

                document = db.query(Document).filter_by(name=name).first()
                return DocumentModel.model_validate(document) if document else None
        except Exception:
@@ -107,7 +101,6 @@ class DocumentsTable:

    def get_docs(self) -> list[DocumentModel]:
        with get_db() as db:

            return [
                DocumentModel.model_validate(doc) for doc in db.query(Document).all()
            ]
@@ -117,7 +110,6 @@ class DocumentsTable:
    ) -> Optional[DocumentModel]:
        try:
            with get_db() as db:

                db.query(Document).filter_by(name=name).update(
                    {
                        "title": form_data.title,
@@ -140,7 +132,6 @@ class DocumentsTable:
        doc_content = {**doc_content, **updated}

        with get_db() as db:

            db.query(Document).filter_by(name=name).update(
                {
                    "content": json.dumps(doc_content),
@@ -156,7 +147,6 @@ class DocumentsTable:
    def delete_doc_by_name(self, name: str) -> bool:
        try:
            with get_db() as db:

                db.query(Document).filter_by(name=name).delete()
                db.commit()
                return True
backend/open_webui/apps/webui/models/feedbacks.py (new file, 254 lines)
@@ -0,0 +1,254 @@
import logging
import time
import uuid
from typing import Optional

from open_webui.apps.webui.internal.db import Base, get_db
from open_webui.apps.webui.models.chats import Chats

from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, Text, JSON, Boolean

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


####################
# Feedback DB Schema
####################


class Feedback(Base):
    __tablename__ = "feedback"
    id = Column(Text, primary_key=True)
    user_id = Column(Text)
    version = Column(BigInteger, default=0)
    type = Column(Text)
    data = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)
    snapshot = Column(JSON, nullable=True)
    created_at = Column(BigInteger)
    updated_at = Column(BigInteger)


class FeedbackModel(BaseModel):
    id: str
    user_id: str
    version: int
    type: str
    data: Optional[dict] = None
    meta: Optional[dict] = None
    snapshot: Optional[dict] = None
    created_at: int
    updated_at: int

    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################


class FeedbackResponse(BaseModel):
    id: str
    user_id: str
    version: int
    type: str
    data: Optional[dict] = None
    meta: Optional[dict] = None
    created_at: int
    updated_at: int


class RatingData(BaseModel):
    rating: Optional[str | int] = None
    model_id: Optional[str] = None
    sibling_model_ids: Optional[list[str]] = None
    reason: Optional[str] = None
    comment: Optional[str] = None
    model_config = ConfigDict(extra="allow", protected_namespaces=())


class MetaData(BaseModel):
    arena: Optional[bool] = None
    chat_id: Optional[str] = None
    message_id: Optional[str] = None
    tags: Optional[list[str]] = None
    model_config = ConfigDict(extra="allow")


class SnapshotData(BaseModel):
    chat: Optional[dict] = None
    model_config = ConfigDict(extra="allow")


class FeedbackForm(BaseModel):
    type: str
    data: Optional[RatingData] = None
    meta: Optional[dict] = None
    snapshot: Optional[SnapshotData] = None
    model_config = ConfigDict(extra="allow")


class FeedbackTable:
    def insert_new_feedback(
        self, user_id: str, form_data: FeedbackForm
    ) -> Optional[FeedbackModel]:
        with get_db() as db:
            id = str(uuid.uuid4())
            feedback = FeedbackModel(
                **{
                    "id": id,
                    "user_id": user_id,
                    "version": 0,
                    **form_data.model_dump(),
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )
            try:
                result = Feedback(**feedback.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return FeedbackModel.model_validate(result)
                else:
                    return None
            except Exception as e:
                print(e)
                return None

    def get_feedback_by_id(self, id: str) -> Optional[FeedbackModel]:
        try:
            with get_db() as db:
                feedback = db.query(Feedback).filter_by(id=id).first()
                if not feedback:
                    return None
                return FeedbackModel.model_validate(feedback)
        except Exception:
            return None

    def get_feedback_by_id_and_user_id(
        self, id: str, user_id: str
    ) -> Optional[FeedbackModel]:
        try:
            with get_db() as db:
                feedback = db.query(Feedback).filter_by(id=id, user_id=user_id).first()
                if not feedback:
                    return None
                return FeedbackModel.model_validate(feedback)
        except Exception:
            return None

    def get_all_feedbacks(self) -> list[FeedbackModel]:
        with get_db() as db:
            return [
                FeedbackModel.model_validate(feedback)
                for feedback in db.query(Feedback)
                .order_by(Feedback.updated_at.desc())
                .all()
            ]

    def get_feedbacks_by_type(self, type: str) -> list[FeedbackModel]:
        with get_db() as db:
            return [
                FeedbackModel.model_validate(feedback)
                for feedback in db.query(Feedback)
                .filter_by(type=type)
                .order_by(Feedback.updated_at.desc())
                .all()
            ]

    def get_feedbacks_by_user_id(self, user_id: str) -> list[FeedbackModel]:
        with get_db() as db:
            return [
                FeedbackModel.model_validate(feedback)
                for feedback in db.query(Feedback)
                .filter_by(user_id=user_id)
                .order_by(Feedback.updated_at.desc())
                .all()
            ]

    def update_feedback_by_id(
        self, id: str, form_data: FeedbackForm
    ) -> Optional[FeedbackModel]:
        with get_db() as db:
            feedback = db.query(Feedback).filter_by(id=id).first()
            if not feedback:
                return None

            if form_data.data:
                feedback.data = form_data.data.model_dump()
            if form_data.meta:
                feedback.meta = form_data.meta
            if form_data.snapshot:
                feedback.snapshot = form_data.snapshot.model_dump()

            feedback.updated_at = int(time.time())

            db.commit()
            return FeedbackModel.model_validate(feedback)

    def update_feedback_by_id_and_user_id(
        self, id: str, user_id: str, form_data: FeedbackForm
    ) -> Optional[FeedbackModel]:
        with get_db() as db:
            feedback = db.query(Feedback).filter_by(id=id, user_id=user_id).first()
            if not feedback:
                return None

            if form_data.data:
                feedback.data = form_data.data.model_dump()
            if form_data.meta:
                feedback.meta = form_data.meta
            if form_data.snapshot:
                feedback.snapshot = form_data.snapshot.model_dump()

            feedback.updated_at = int(time.time())

            db.commit()
            return FeedbackModel.model_validate(feedback)

    def delete_feedback_by_id(self, id: str) -> bool:
        with get_db() as db:
            feedback = db.query(Feedback).filter_by(id=id).first()
            if not feedback:
                return False
            db.delete(feedback)
            db.commit()
            return True

    def delete_feedback_by_id_and_user_id(self, id: str, user_id: str) -> bool:
        with get_db() as db:
            feedback = db.query(Feedback).filter_by(id=id, user_id=user_id).first()
            if not feedback:
                return False
            db.delete(feedback)
            db.commit()
            return True

    def delete_feedbacks_by_user_id(self, user_id: str) -> bool:
        with get_db() as db:
            feedbacks = db.query(Feedback).filter_by(user_id=user_id).all()
            if not feedbacks:
                return False
            for feedback in feedbacks:
                db.delete(feedback)
            db.commit()
            return True

    def delete_all_feedbacks(self) -> bool:
        with get_db() as db:
            feedbacks = db.query(Feedback).all()
            if not feedbacks:
                return False
            for feedback in feedbacks:
                db.delete(feedback)
            db.commit()
            return True


Feedbacks = FeedbackTable()
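A representative arena-rating payload against the form models above; the ids and strings are illustrative, and the meta dict follows the optional MetaData shape.

# Illustrative FeedbackForm usage.
form = FeedbackForm(
    type="rating",
    data=RatingData(
        rating=1,
        model_id="model-a",
        sibling_model_ids=["model-b"],
        reason="accuracy",
        comment="preferred this answer",
    ),
    meta={"arena": True, "chat_id": "chat-1", "message_id": "msg-1"},
)
feedback = Feedbacks.insert_new_feedback(user_id="u1", form_data=form)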
228
backend/open_webui/apps/webui/models/files.py
Normal file
228
backend/open_webui/apps/webui/models/files.py
Normal file
@@ -0,0 +1,228 @@
import logging
import time
from typing import Optional

from open_webui.apps.webui.internal.db import Base, JSONField, get_db
from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, JSON

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Files DB Schema
####################


class File(Base):
    __tablename__ = "file"
    id = Column(String, primary_key=True)
    user_id = Column(String)
    hash = Column(Text, nullable=True)

    filename = Column(Text)
    path = Column(Text, nullable=True)

    data = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)

    created_at = Column(BigInteger)
    updated_at = Column(BigInteger)


class FileModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: str
    user_id: str
    hash: Optional[str] = None

    filename: str
    path: Optional[str] = None

    data: Optional[dict] = None
    meta: Optional[dict] = None

    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch


####################
# Forms
####################


class FileMeta(BaseModel):
    name: Optional[str] = None
    content_type: Optional[str] = None
    size: Optional[int] = None

    model_config = ConfigDict(extra="allow")


class FileModelResponse(BaseModel):
    id: str
    user_id: str
    hash: Optional[str] = None

    filename: str
    data: Optional[dict] = None
    meta: FileMeta

    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch


class FileMetadataResponse(BaseModel):
    id: str
    meta: dict
    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch


class FileForm(BaseModel):
    id: str
    hash: Optional[str] = None
    filename: str
    path: str
    data: dict = {}
    meta: dict = {}


class FilesTable:
    def insert_new_file(self, user_id: str, form_data: FileForm) -> Optional[FileModel]:
        with get_db() as db:
            file = FileModel(
                **{
                    **form_data.model_dump(),
                    "user_id": user_id,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )

            try:
                result = File(**file.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return FileModel.model_validate(result)
                else:
                    return None
            except Exception as e:
                print(f"Error creating file: {e}")
                return None

    def get_file_by_id(self, id: str) -> Optional[FileModel]:
        with get_db() as db:
            try:
                file = db.get(File, id)
                return FileModel.model_validate(file)
            except Exception:
                return None

    def get_file_metadata_by_id(self, id: str) -> Optional[FileMetadataResponse]:
        with get_db() as db:
            try:
                file = db.get(File, id)
                return FileMetadataResponse(
                    id=file.id,
                    meta=file.meta,
                    created_at=file.created_at,
                    updated_at=file.updated_at,
                )
            except Exception:
                return None

    def get_files(self) -> list[FileModel]:
        with get_db() as db:
            return [FileModel.model_validate(file) for file in db.query(File).all()]

    def get_files_by_ids(self, ids: list[str]) -> list[FileModel]:
        with get_db() as db:
            return [
                FileModel.model_validate(file)
                for file in db.query(File)
                .filter(File.id.in_(ids))
                .order_by(File.updated_at.desc())
                .all()
            ]

    def get_file_metadatas_by_ids(self, ids: list[str]) -> list[FileMetadataResponse]:
        with get_db() as db:
            return [
                FileMetadataResponse(
                    id=file.id,
                    meta=file.meta,
                    created_at=file.created_at,
                    updated_at=file.updated_at,
                )
                for file in db.query(File)
                .filter(File.id.in_(ids))
                .order_by(File.updated_at.desc())
                .all()
            ]

    def get_files_by_user_id(self, user_id: str) -> list[FileModel]:
        with get_db() as db:
            return [
                FileModel.model_validate(file)
                for file in db.query(File).filter_by(user_id=user_id).all()
            ]

    def update_file_hash_by_id(self, id: str, hash: str) -> Optional[FileModel]:
        with get_db() as db:
            try:
                file = db.query(File).filter_by(id=id).first()
                file.hash = hash
                db.commit()

                return FileModel.model_validate(file)
            except Exception:
                return None

    def update_file_data_by_id(self, id: str, data: dict) -> Optional[FileModel]:
        with get_db() as db:
            try:
                file = db.query(File).filter_by(id=id).first()
                file.data = {**(file.data if file.data else {}), **data}
                db.commit()
                return FileModel.model_validate(file)
            except Exception:
                return None

    def update_file_metadata_by_id(self, id: str, meta: dict) -> Optional[FileModel]:
        with get_db() as db:
            try:
                file = db.query(File).filter_by(id=id).first()
                file.meta = {**(file.meta if file.meta else {}), **meta}
                db.commit()
                return FileModel.model_validate(file)
            except Exception:
                return None

    def delete_file_by_id(self, id: str) -> bool:
        with get_db() as db:
            try:
                db.query(File).filter_by(id=id).delete()
                db.commit()

                return True
            except Exception:
                return False

    def delete_all_files(self) -> bool:
        with get_db() as db:
            try:
                db.query(File).delete()
                db.commit()

                return True
            except Exception:
                return False


Files = FilesTable()
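Note that update_file_data_by_id and update_file_metadata_by_id merge the incoming dict over the stored one rather than replacing it, so successive partial updates accumulate. A small sketch of that behavior (all ids and values are hypothetical):

from open_webui.apps.webui.models.files import Files, FileForm

Files.insert_new_file(
    "user-1",  # hypothetical user id
    FileForm(id="file-1", filename="notes.txt", path="/data/uploads/notes.txt"),
)

Files.update_file_data_by_id("file-1", {"content": "hello"})
Files.update_file_data_by_id("file-1", {"status": "processed"})

updated = Files.get_file_by_id("file-1")
# Both keys survive because each update merges into the stored dict:
# updated.data == {"content": "hello", "status": "processed"}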
backend/open_webui/apps/webui/models/folders.py (new file, 271 lines)
@@ -0,0 +1,271 @@
import logging
import time
import uuid
from typing import Optional

from open_webui.apps.webui.internal.db import Base, get_db
from open_webui.apps.webui.models.chats import Chats

from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, Text, JSON, Boolean

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


####################
# Folder DB Schema
####################


class Folder(Base):
    __tablename__ = "folder"
    id = Column(Text, primary_key=True)
    parent_id = Column(Text, nullable=True)
    user_id = Column(Text)
    name = Column(Text)
    items = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)
    is_expanded = Column(Boolean, default=False)
    created_at = Column(BigInteger)
    updated_at = Column(BigInteger)


class FolderModel(BaseModel):
    id: str
    parent_id: Optional[str] = None
    user_id: str
    name: str
    items: Optional[dict] = None
    meta: Optional[dict] = None
    is_expanded: bool = False
    created_at: int
    updated_at: int

    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################


class FolderForm(BaseModel):
    name: str
    model_config = ConfigDict(extra="allow")


class FolderTable:
    def insert_new_folder(
        self, user_id: str, name: str, parent_id: Optional[str] = None
    ) -> Optional[FolderModel]:
        with get_db() as db:
            id = str(uuid.uuid4())
            folder = FolderModel(
                **{
                    "id": id,
                    "user_id": user_id,
                    "name": name,
                    "parent_id": parent_id,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )
            try:
                result = Folder(**folder.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return FolderModel.model_validate(result)
                else:
                    return None
            except Exception as e:
                print(e)
                return None

    def get_folder_by_id_and_user_id(
        self, id: str, user_id: str
    ) -> Optional[FolderModel]:
        try:
            with get_db() as db:
                folder = db.query(Folder).filter_by(id=id, user_id=user_id).first()

                if not folder:
                    return None

                return FolderModel.model_validate(folder)
        except Exception:
            return None

    def get_children_folders_by_id_and_user_id(
        self, id: str, user_id: str
    ) -> Optional[list[FolderModel]]:
        try:
            with get_db() as db:
                folders = []

                def get_children(folder):
                    children = self.get_folders_by_parent_id_and_user_id(
                        folder.id, user_id
                    )
                    for child in children:
                        get_children(child)
                        folders.append(child)

                folder = db.query(Folder).filter_by(id=id, user_id=user_id).first()
                if not folder:
                    return None

                get_children(folder)
                return folders
        except Exception:
            return None

    def get_folders_by_user_id(self, user_id: str) -> list[FolderModel]:
        with get_db() as db:
            return [
                FolderModel.model_validate(folder)
                for folder in db.query(Folder).filter_by(user_id=user_id).all()
            ]

    def get_folder_by_parent_id_and_user_id_and_name(
        self, parent_id: Optional[str], user_id: str, name: str
    ) -> Optional[FolderModel]:
        try:
            with get_db() as db:
                # Check if the folder exists
                folder = (
                    db.query(Folder)
                    .filter_by(parent_id=parent_id, user_id=user_id)
                    .filter(Folder.name.ilike(name))
                    .first()
                )

                if not folder:
                    return None

                return FolderModel.model_validate(folder)
        except Exception as e:
            log.error(f"get_folder_by_parent_id_and_user_id_and_name: {e}")
            return None

    def get_folders_by_parent_id_and_user_id(
        self, parent_id: Optional[str], user_id: str
    ) -> list[FolderModel]:
        with get_db() as db:
            return [
                FolderModel.model_validate(folder)
                for folder in db.query(Folder)
                .filter_by(parent_id=parent_id, user_id=user_id)
                .all()
            ]

    def update_folder_parent_id_by_id_and_user_id(
        self,
        id: str,
        user_id: str,
        parent_id: str,
    ) -> Optional[FolderModel]:
        try:
            with get_db() as db:
                folder = db.query(Folder).filter_by(id=id, user_id=user_id).first()

                if not folder:
                    return None

                folder.parent_id = parent_id
                folder.updated_at = int(time.time())

                db.commit()

                return FolderModel.model_validate(folder)
        except Exception as e:
            log.error(f"update_folder: {e}")
            return None

    def update_folder_name_by_id_and_user_id(
        self, id: str, user_id: str, name: str
    ) -> Optional[FolderModel]:
        try:
            with get_db() as db:
                folder = db.query(Folder).filter_by(id=id, user_id=user_id).first()

                if not folder:
                    return None

                existing_folder = (
                    db.query(Folder)
                    .filter_by(name=name, parent_id=folder.parent_id, user_id=user_id)
                    .first()
                )

                if existing_folder:
                    return None

                folder.name = name
                folder.updated_at = int(time.time())

                db.commit()

                return FolderModel.model_validate(folder)
        except Exception as e:
            log.error(f"update_folder: {e}")
            return None

    def update_folder_is_expanded_by_id_and_user_id(
        self, id: str, user_id: str, is_expanded: bool
    ) -> Optional[FolderModel]:
        try:
            with get_db() as db:
                folder = db.query(Folder).filter_by(id=id, user_id=user_id).first()

                if not folder:
                    return None

                folder.is_expanded = is_expanded
                folder.updated_at = int(time.time())

                db.commit()

                return FolderModel.model_validate(folder)
        except Exception as e:
            log.error(f"update_folder: {e}")
            return None

    def delete_folder_by_id_and_user_id(self, id: str, user_id: str) -> bool:
        try:
            with get_db() as db:
                folder = db.query(Folder).filter_by(id=id, user_id=user_id).first()
                if not folder:
                    return False

                # Delete all chats in the folder
                Chats.delete_chats_by_user_id_and_folder_id(user_id, folder.id)

                # Delete all children folders
                def delete_children(folder):
                    folder_children = self.get_folders_by_parent_id_and_user_id(
                        folder.id, user_id
                    )
                    for folder_child in folder_children:
                        Chats.delete_chats_by_user_id_and_folder_id(
                            user_id, folder_child.id
                        )
                        delete_children(folder_child)

                        folder = db.query(Folder).filter_by(id=folder_child.id).first()
                        db.delete(folder)
                        db.commit()

                delete_children(folder)
                db.delete(folder)
                db.commit()
                return True
        except Exception as e:
            log.error(f"delete_folder: {e}")
            return False


Folders = FolderTable()
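get_children_folders_by_id_and_user_id walks every descendant recursively, and delete_folder_by_id_and_user_id reuses the same traversal to remove chats and subfolders bottom-up before deleting the folder itself. A sketch of the expected behavior (user id and names are hypothetical):

from open_webui.apps.webui.models.folders import Folders

root = Folders.insert_new_folder("user-1", "Projects")
child = Folders.insert_new_folder("user-1", "2024", root.id)
grandchild = Folders.insert_new_folder("user-1", "Q3", child.id)

descendants = Folders.get_children_folders_by_id_and_user_id(root.id, "user-1")
# descendants contains both "2024" and "Q3".

# Deleting the root removes the whole subtree, plus any chats
# filed under each folder along the way.
Folders.delete_folder_by_id_and_user_id(root.id, "user-1")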
@@ -1,18 +1,12 @@
-from pydantic import BaseModel, ConfigDict
-from typing import Union, Optional
-import time
 import logging
+import time
+from typing import Optional
 
-from sqlalchemy import Column, String, Text, BigInteger, Boolean
-
-from apps.webui.internal.db import JSONField, Base, get_db
-from apps.webui.models.users import Users
-
-import json
-import copy
-
-
-from config import SRC_LOG_LEVELS
+from open_webui.apps.webui.internal.db import Base, JSONField, get_db
+from open_webui.apps.webui.models.users import Users
+from open_webui.env import SRC_LOG_LEVELS
+from pydantic import BaseModel, ConfigDict
+from sqlalchemy import BigInteger, Boolean, Column, String, Text
 
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["MODELS"])
@@ -87,11 +81,9 @@ class FunctionValves(BaseModel):
 
 
 class FunctionsTable:
-
     def insert_new_function(
         self, user_id: str, type: str, form_data: FunctionForm
     ) -> Optional[FunctionModel]:
-
         function = FunctionModel(
             **{
                 **form_data.model_dump(),
@@ -119,7 +111,6 @@ class FunctionsTable:
     def get_function_by_id(self, id: str) -> Optional[FunctionModel]:
         try:
             with get_db() as db:
-
                 function = db.get(Function, id)
                 return FunctionModel.model_validate(function)
         except Exception:
@@ -127,7 +118,6 @@ class FunctionsTable:
 
     def get_functions(self, active_only=False) -> list[FunctionModel]:
         with get_db() as db:
-
             if active_only:
                 return [
                     FunctionModel.model_validate(function)
@@ -143,7 +133,6 @@ class FunctionsTable:
         self, type: str, active_only=False
     ) -> list[FunctionModel]:
         with get_db() as db:
-
             if active_only:
                 return [
                     FunctionModel.model_validate(function)
@@ -159,7 +148,6 @@ class FunctionsTable:
 
     def get_global_filter_functions(self) -> list[FunctionModel]:
         with get_db() as db:
-
             return [
                 FunctionModel.model_validate(function)
                 for function in db.query(Function)
@@ -178,7 +166,6 @@ class FunctionsTable:
 
     def get_function_valves_by_id(self, id: str) -> Optional[dict]:
         with get_db() as db:
-
             try:
                 function = db.get(Function, id)
                 return function.valves if function.valves else {}
@@ -190,7 +177,6 @@ class FunctionsTable:
         self, id: str, valves: dict
     ) -> Optional[FunctionValves]:
         with get_db() as db:
-
             try:
                 function = db.get(Function, id)
                 function.valves = valves
@@ -204,7 +190,6 @@ class FunctionsTable:
     def get_user_valves_by_id_and_user_id(
         self, id: str, user_id: str
    ) -> Optional[dict]:
-
        try:
            user = Users.get_user_by_id(user_id)
            user_settings = user.settings.model_dump() if user.settings else {}
@@ -223,7 +208,6 @@ class FunctionsTable:
     def update_user_valves_by_id_and_user_id(
         self, id: str, user_id: str, valves: dict
     ) -> Optional[dict]:
-
         try:
             user = Users.get_user_by_id(user_id)
             user_settings = user.settings.model_dump() if user.settings else {}
@@ -246,7 +230,6 @@ class FunctionsTable:
 
     def update_function_by_id(self, id: str, updated: dict) -> Optional[FunctionModel]:
         with get_db() as db:
-
             try:
                 db.query(Function).filter_by(id=id).update(
                     {
@@ -261,7 +244,6 @@ class FunctionsTable:
 
     def deactivate_all_functions(self) -> Optional[bool]:
         with get_db() as db:
-
             try:
                 db.query(Function).update(
                     {
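Function valves are plain dicts persisted on the function row; per-user valves live inside the user's settings blob instead. A round-trip sketch, assuming the module-level Functions singleton mirrors the other tables (the function id and valve key are hypothetical):

from open_webui.apps.webui.models.functions import Functions

Functions.update_function_valves_by_id("my_filter", {"priority": 5})  # hypothetical id
valves = Functions.get_function_valves_by_id("my_filter")
# valves == {"priority": 5}; an unset valves column comes back as {}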
backend/open_webui/apps/webui/models/knowledge.py (new file, 168 lines)
@@ -0,0 +1,168 @@
import json
import logging
import time
import uuid
from typing import Optional

from open_webui.apps.webui.internal.db import Base, get_db
from open_webui.env import SRC_LOG_LEVELS

from open_webui.apps.webui.models.files import FileMetadataResponse

from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, JSON

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Knowledge DB Schema
####################


class Knowledge(Base):
    __tablename__ = "knowledge"

    id = Column(Text, unique=True, primary_key=True)
    user_id = Column(Text)

    name = Column(Text)
    description = Column(Text)

    data = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)

    created_at = Column(BigInteger)
    updated_at = Column(BigInteger)


class KnowledgeModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: str
    user_id: str

    name: str
    description: str

    data: Optional[dict] = None
    meta: Optional[dict] = None

    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch


####################
# Forms
####################


class KnowledgeResponse(BaseModel):
    id: str
    name: str
    description: str
    data: Optional[dict] = None
    meta: Optional[dict] = None
    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch

    files: Optional[list[FileMetadataResponse | dict]] = None


class KnowledgeForm(BaseModel):
    name: str
    description: str
    data: Optional[dict] = None


class KnowledgeUpdateForm(BaseModel):
    name: Optional[str] = None
    description: Optional[str] = None
    data: Optional[dict] = None


class KnowledgeTable:
    def insert_new_knowledge(
        self, user_id: str, form_data: KnowledgeForm
    ) -> Optional[KnowledgeModel]:
        with get_db() as db:
            knowledge = KnowledgeModel(
                **{
                    **form_data.model_dump(),
                    "id": str(uuid.uuid4()),
                    "user_id": user_id,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )

            try:
                result = Knowledge(**knowledge.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return KnowledgeModel.model_validate(result)
                else:
                    return None
            except Exception:
                return None

    def get_knowledge_items(self) -> list[KnowledgeModel]:
        with get_db() as db:
            return [
                KnowledgeModel.model_validate(knowledge)
                for knowledge in db.query(Knowledge)
                .order_by(Knowledge.updated_at.desc())
                .all()
            ]

    def get_knowledge_by_id(self, id: str) -> Optional[KnowledgeModel]:
        try:
            with get_db() as db:
                knowledge = db.query(Knowledge).filter_by(id=id).first()
                return KnowledgeModel.model_validate(knowledge) if knowledge else None
        except Exception:
            return None

    def update_knowledge_by_id(
        self, id: str, form_data: KnowledgeUpdateForm, overwrite: bool = False
    ) -> Optional[KnowledgeModel]:
        try:
            with get_db() as db:
                knowledge = self.get_knowledge_by_id(id=id)
                db.query(Knowledge).filter_by(id=id).update(
                    {
                        **form_data.model_dump(exclude_none=True),
                        "updated_at": int(time.time()),
                    }
                )
                db.commit()
                return self.get_knowledge_by_id(id=id)
        except Exception as e:
            log.exception(e)
            return None

    def delete_knowledge_by_id(self, id: str) -> bool:
        try:
            with get_db() as db:
                db.query(Knowledge).filter_by(id=id).delete()
                db.commit()
                return True
        except Exception:
            return False

    def delete_all_knowledge(self) -> bool:
        with get_db() as db:
            try:
                db.query(Knowledge).delete()
                db.commit()

                return True
            except Exception:
                return False


Knowledges = KnowledgeTable()
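Because update_knowledge_by_id dumps the form with exclude_none=True, any field left unset on KnowledgeUpdateForm keeps its stored value. A sketch (ids and strings are hypothetical):

from open_webui.apps.webui.models.knowledge import (
    KnowledgeForm,
    KnowledgeUpdateForm,
    Knowledges,
)

kb = Knowledges.insert_new_knowledge(
    "user-1", KnowledgeForm(name="Docs", description="Product docs")
)

# Only the description changes; name and data are left untouched.
Knowledges.update_knowledge_by_id(
    kb.id, KnowledgeUpdateForm(description="Product and API docs")
)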
@@ -1,12 +1,10 @@
-from pydantic import BaseModel, ConfigDict
-from typing import Union, Optional
-
-from sqlalchemy import Column, String, BigInteger, Text
-
-from apps.webui.internal.db import Base, get_db
-
 import time
 import uuid
+from typing import Optional
+
+from open_webui.apps.webui.internal.db import Base, get_db
+from pydantic import BaseModel, ConfigDict
+from sqlalchemy import BigInteger, Column, String, Text
 
 ####################
 # Memory DB Schema
@@ -39,13 +37,11 @@ class MemoryModel(BaseModel):
 
 
 class MemoriesTable:
-
     def insert_new_memory(
         self,
         user_id: str,
         content: str,
     ) -> Optional[MemoryModel]:
-
         with get_db() as db:
             id = str(uuid.uuid4())
 
@@ -73,7 +69,6 @@ class MemoriesTable:
         content: str,
     ) -> Optional[MemoryModel]:
         with get_db() as db:
-
             try:
                 db.query(Memory).filter_by(id=id).update(
                     {"content": content, "updated_at": int(time.time())}
@@ -85,7 +80,6 @@ class MemoriesTable:
 
     def get_memories(self) -> list[MemoryModel]:
         with get_db() as db:
-
             try:
                 memories = db.query(Memory).all()
                 return [MemoryModel.model_validate(memory) for memory in memories]
@@ -94,7 +88,6 @@ class MemoriesTable:
 
     def get_memories_by_user_id(self, user_id: str) -> list[MemoryModel]:
         with get_db() as db:
-
             try:
                 memories = db.query(Memory).filter_by(user_id=user_id).all()
                 return [MemoryModel.model_validate(memory) for memory in memories]
@@ -103,7 +96,6 @@ class MemoriesTable:
 
     def get_memory_by_id(self, id: str) -> Optional[MemoryModel]:
         with get_db() as db:
-
             try:
                 memory = db.get(Memory, id)
                 return MemoryModel.model_validate(memory)
@@ -112,7 +104,6 @@ class MemoriesTable:
 
     def delete_memory_by_id(self, id: str) -> bool:
         with get_db() as db:
-
             try:
                 db.query(Memory).filter_by(id=id).delete()
                 db.commit()
@@ -124,7 +115,6 @@ class MemoriesTable:
 
     def delete_memories_by_user_id(self, user_id: str) -> bool:
         with get_db() as db:
-
             try:
                 db.query(Memory).filter_by(user_id=user_id).delete()
                 db.commit()
@@ -135,7 +125,6 @@ class MemoriesTable:
 
     def delete_memory_by_id_and_user_id(self, id: str, user_id: str) -> bool:
         with get_db() as db:
-
             try:
                 db.query(Memory).filter_by(id=id, user_id=user_id).delete()
                 db.commit()
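A quick usage sketch of the memory lifecycle shown in the hunks above, assuming a module-level Memories singleton like the other tables (the user id and content are hypothetical):

from open_webui.apps.webui.models.memories import Memories

memory = Memories.insert_new_memory("user-1", "Prefers concise answers")
memories = Memories.get_memories_by_user_id("user-1")
Memories.delete_memory_by_id_and_user_id(memory.id, "user-1")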
@@ -1,14 +1,11 @@
 import logging
-from typing import Optional, List
-
-from pydantic import BaseModel, ConfigDict
-from sqlalchemy import Column, BigInteger, Text
-
-from apps.webui.internal.db import Base, JSONField, get_db
-
-from config import SRC_LOG_LEVELS
-
+import time
+from typing import Optional
 
+from open_webui.apps.webui.internal.db import Base, JSONField, get_db
+from open_webui.env import SRC_LOG_LEVELS
+from pydantic import BaseModel, ConfigDict
+from sqlalchemy import BigInteger, Column, Text
 
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["MODELS"])
@@ -1,12 +1,9 @@
-from pydantic import BaseModel, ConfigDict
-from typing import Optional
+import time
+from typing import Optional
 
-from sqlalchemy import String, Column, BigInteger, Text
-
-from apps.webui.internal.db import Base, get_db
-
-import json
+from open_webui.apps.webui.internal.db import Base, get_db
+from pydantic import BaseModel, ConfigDict
+from sqlalchemy import BigInteger, Column, String, Text
 
 ####################
 # Prompts DB Schema
@@ -45,7 +42,6 @@ class PromptForm(BaseModel):
 
 
 class PromptsTable:
-
     def insert_new_prompt(
         self, user_id: str, form_data: PromptForm
     ) -> Optional[PromptModel]:
@@ -61,7 +57,6 @@ class PromptsTable:
 
         try:
             with get_db() as db:
-
                 result = Prompt(**prompt.dict())
                 db.add(result)
                 db.commit()
@@ -70,13 +65,12 @@ class PromptsTable:
                     return PromptModel.model_validate(result)
                 else:
                     return None
-        except Exception as e:
+        except Exception:
             return None
 
     def get_prompt_by_command(self, command: str) -> Optional[PromptModel]:
         try:
             with get_db() as db:
-
                 prompt = db.query(Prompt).filter_by(command=command).first()
                 return PromptModel.model_validate(prompt)
         except Exception:
@@ -84,7 +78,6 @@ class PromptsTable:
 
     def get_prompts(self) -> list[PromptModel]:
         with get_db() as db:
-
             return [
                 PromptModel.model_validate(prompt) for prompt in db.query(Prompt).all()
             ]
@@ -94,7 +87,6 @@ class PromptsTable:
     ) -> Optional[PromptModel]:
         try:
             with get_db() as db:
-
                 prompt = db.query(Prompt).filter_by(command=command).first()
                 prompt.title = form_data.title
                 prompt.content = form_data.content
@@ -107,7 +99,6 @@ class PromptsTable:
     def delete_prompt_by_command(self, command: str) -> bool:
         try:
             with get_db() as db:
-
                 db.query(Prompt).filter_by(command=command).delete()
                 db.commit()
 
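Prompts are keyed by their slash command rather than by id, so lookup, update, and delete all take the command string. A sketch, assuming the module-level Prompts singleton mirrors the other tables (the command is hypothetical):

from open_webui.apps.webui.models.prompts import Prompts

prompt = Prompts.get_prompt_by_command("/summarize")  # hypothetical command
if prompt:
    Prompts.delete_prompt_by_command("/summarize")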
backend/open_webui/apps/webui/models/tags.py (new file, 109 lines)
@@ -0,0 +1,109 @@
import logging
import time
import uuid
from typing import Optional

from open_webui.apps.webui.internal.db import Base, get_db

from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, JSON, PrimaryKeyConstraint

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


####################
# Tag DB Schema
####################
class Tag(Base):
    __tablename__ = "tag"
    id = Column(String)
    name = Column(String)
    user_id = Column(String)
    meta = Column(JSON, nullable=True)

    # Unique constraint ensuring (id, user_id) is unique, not just the `id` column
    __table_args__ = (PrimaryKeyConstraint("id", "user_id", name="pk_id_user_id"),)


class TagModel(BaseModel):
    id: str
    name: str
    user_id: str
    meta: Optional[dict] = None
    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################


class TagChatIdForm(BaseModel):
    name: str
    chat_id: str


class TagTable:
    def insert_new_tag(self, name: str, user_id: str) -> Optional[TagModel]:
        with get_db() as db:
            id = name.replace(" ", "_").lower()
            tag = TagModel(**{"id": id, "user_id": user_id, "name": name})
            try:
                result = Tag(**tag.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return TagModel.model_validate(result)
                else:
                    return None
            except Exception as e:
                print(e)
                return None

    def get_tag_by_name_and_user_id(
        self, name: str, user_id: str
    ) -> Optional[TagModel]:
        try:
            id = name.replace(" ", "_").lower()
            with get_db() as db:
                tag = db.query(Tag).filter_by(id=id, user_id=user_id).first()
                return TagModel.model_validate(tag)
        except Exception:
            return None

    def get_tags_by_user_id(self, user_id: str) -> list[TagModel]:
        with get_db() as db:
            return [
                TagModel.model_validate(tag)
                for tag in (db.query(Tag).filter_by(user_id=user_id).all())
            ]

    def get_tags_by_ids_and_user_id(
        self, ids: list[str], user_id: str
    ) -> list[TagModel]:
        with get_db() as db:
            return [
                TagModel.model_validate(tag)
                for tag in (
                    db.query(Tag).filter(Tag.id.in_(ids), Tag.user_id == user_id).all()
                )
            ]

    def delete_tag_by_name_and_user_id(self, name: str, user_id: str) -> bool:
        try:
            with get_db() as db:
                id = name.replace(" ", "_").lower()
                res = db.query(Tag).filter_by(id=id, user_id=user_id).delete()
                log.debug(f"res: {res}")
                db.commit()
                return True
        except Exception as e:
            log.error(f"delete_tag: {e}")
            return False


Tags = TagTable()
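The (id, user_id) composite primary key means two users can own tags with the same slug without colliding, and the slug itself is derived by lowercasing the name and replacing spaces with underscores. A sketch (user ids are hypothetical):

from open_webui.apps.webui.models.tags import Tags

a = Tags.insert_new_tag("Work Notes", "user-a")  # id becomes "work_notes"
b = Tags.insert_new_tag("Work Notes", "user-b")  # same id, different user: allowed

# Lookups are always scoped by user, so each user sees only their own tag.
tag = Tags.get_tag_by_name_and_user_id("Work Notes", "user-a")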
@@ -1,17 +1,12 @@
-from pydantic import BaseModel, ConfigDict
-from typing import Optional
-import time
 import logging
-from sqlalchemy import String, Column, BigInteger, Text
+import time
+from typing import Optional
 
-from apps.webui.internal.db import Base, JSONField, get_db
-from apps.webui.models.users import Users
-
-import json
-import copy
-
-
-from config import SRC_LOG_LEVELS
+from open_webui.apps.webui.internal.db import Base, JSONField, get_db
+from open_webui.apps.webui.models.users import Users
+from open_webui.env import SRC_LOG_LEVELS
+from pydantic import BaseModel, ConfigDict
+from sqlalchemy import BigInteger, Column, String, Text
 
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["MODELS"])
@@ -79,13 +74,10 @@ class ToolValves(BaseModel):
 
 
 class ToolsTable:
-
     def insert_new_tool(
         self, user_id: str, form_data: ToolForm, specs: list[dict]
     ) -> Optional[ToolModel]:
-
         with get_db() as db:
-
             tool = ToolModel(
                 **{
                     **form_data.model_dump(),
@@ -112,7 +104,6 @@ class ToolsTable:
     def get_tool_by_id(self, id: str) -> Optional[ToolModel]:
         try:
             with get_db() as db:
-
                 tool = db.get(Tool, id)
                 return ToolModel.model_validate(tool)
         except Exception:
@@ -125,7 +116,6 @@ class ToolsTable:
     def get_tool_valves_by_id(self, id: str) -> Optional[dict]:
         try:
             with get_db() as db:
-
                 tool = db.get(Tool, id)
                 return tool.valves if tool.valves else {}
         except Exception as e:
@@ -135,7 +125,6 @@ class ToolsTable:
     def update_tool_valves_by_id(self, id: str, valves: dict) -> Optional[ToolValves]:
         try:
             with get_db() as db:
-
                 db.query(Tool).filter_by(id=id).update(
                     {"valves": valves, "updated_at": int(time.time())}
                 )
@@ -1,13 +1,10 @@
-from pydantic import BaseModel, ConfigDict, parse_obj_as
-from typing import Union, Optional
 import time
+from typing import Optional
 
-from sqlalchemy import String, Column, BigInteger, Text
-
-from utils.misc import get_gravatar_url
-
-from apps.webui.internal.db import Base, JSONField, Session, get_db
-from apps.webui.models.chats import Chats
+from open_webui.apps.webui.internal.db import Base, JSONField, get_db
+from open_webui.apps.webui.models.chats import Chats
+from pydantic import BaseModel, ConfigDict
+from sqlalchemy import BigInteger, Column, String, Text
 
 ####################
 # User DB Schema
@@ -78,7 +75,6 @@ class UserUpdateForm(BaseModel):
 
 
 class UsersTable:
-
     def insert_new_user(
         self,
         id: str,
@@ -116,13 +112,12 @@ class UsersTable:
             with get_db() as db:
                 user = db.query(User).filter_by(id=id).first()
                 return UserModel.model_validate(user)
-        except Exception as e:
+        except Exception:
             return None
 
     def get_user_by_api_key(self, api_key: str) -> Optional[UserModel]:
         try:
             with get_db() as db:
-
                 user = db.query(User).filter_by(api_key=api_key).first()
                 return UserModel.model_validate(user)
         except Exception:
@@ -131,7 +126,6 @@ class UsersTable:
     def get_user_by_email(self, email: str) -> Optional[UserModel]:
         try:
             with get_db() as db:
-
                 user = db.query(User).filter_by(email=email).first()
                 return UserModel.model_validate(user)
         except Exception:
@@ -140,7 +134,6 @@ class UsersTable:
     def get_user_by_oauth_sub(self, sub: str) -> Optional[UserModel]:
         try:
             with get_db() as db:
-
                 user = db.query(User).filter_by(oauth_sub=sub).first()
                 return UserModel.model_validate(user)
         except Exception:
@@ -195,7 +188,6 @@ class UsersTable:
     def update_user_last_active_by_id(self, id: str) -> Optional[UserModel]:
         try:
             with get_db() as db:
-
                 db.query(User).filter_by(id=id).update(
                     {"last_active_at": int(time.time())}
                 )
@@ -228,7 +220,7 @@ class UsersTable:
                 user = db.query(User).filter_by(id=id).first()
                 return UserModel.model_validate(user)
                 # return UserModel(**user.dict())
-        except Exception as e:
+        except Exception:
             return None
 
     def delete_user_by_id(self, id: str) -> bool:
@@ -262,7 +254,7 @@ class UsersTable:
             with get_db() as db:
                 user = db.query(User).filter_by(id=id).first()
                 return user.api_key
-        except Exception as e:
+        except Exception:
             return None
 
@@ -1,43 +1,43 @@
 import logging
 
-from fastapi import Request, UploadFile, File
-from fastapi import Depends, HTTPException, status
-from fastapi.responses import Response
-
-from fastapi import APIRouter
-from pydantic import BaseModel
 import re
 import uuid
 import csv
 import time
 import datetime
 
-from apps.webui.models.auths import (
-    SigninForm,
-    SignupForm,
+from open_webui.apps.webui.models.auths import (
     AddUserForm,
-    UpdateProfileForm,
-    UpdatePasswordForm,
-    UserResponse,
-    SigninResponse,
-    Auths,
     ApiKey,
+    Auths,
+    Token,
+    SigninForm,
+    SigninResponse,
+    SignupForm,
+    UpdatePasswordForm,
+    UpdateProfileForm,
+    UserResponse,
 )
-from apps.webui.models.users import Users
-
-from utils.utils import (
-    get_password_hash,
-    get_current_user,
-    get_admin_user,
-    create_token,
-    create_api_key,
-)
-from utils.misc import parse_duration, validate_email_format
-from utils.webhook import post_webhook
-from constants import ERROR_MESSAGES, WEBHOOK_MESSAGES
-from config import (
-    WEBUI_AUTH,
+from open_webui.apps.webui.models.users import Users
+from open_webui.config import WEBUI_AUTH
+from open_webui.constants import ERROR_MESSAGES, WEBHOOK_MESSAGES
+from open_webui.env import (
     WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
     WEBUI_AUTH_TRUSTED_NAME_HEADER,
+    WEBUI_SESSION_COOKIE_SAME_SITE,
+    WEBUI_SESSION_COOKIE_SECURE,
 )
+from fastapi import APIRouter, Depends, HTTPException, Request, status
+from fastapi.responses import Response
+from pydantic import BaseModel
+from open_webui.utils.misc import parse_duration, validate_email_format
+from open_webui.utils.utils import (
+    create_api_key,
+    create_token,
+    get_admin_user,
+    get_verified_user,
+    get_current_user,
+    get_password_hash,
+)
+from open_webui.utils.webhook import post_webhook
+from typing import Optional
 
 router = APIRouter()
 
@@ -46,23 +46,44 @@ router = APIRouter()
 ############################
 
 
-@router.get("/", response_model=UserResponse)
+class SessionUserResponse(Token, UserResponse):
+    expires_at: Optional[int] = None
+
+
+@router.get("/", response_model=SessionUserResponse)
 async def get_session_user(
     request: Request, response: Response, user=Depends(get_current_user)
 ):
+    expires_delta = parse_duration(request.app.state.config.JWT_EXPIRES_IN)
+    expires_at = None
+    if expires_delta:
+        expires_at = int(time.time()) + int(expires_delta.total_seconds())
+
     token = create_token(
         data={"id": user.id},
-        expires_delta=parse_duration(request.app.state.config.JWT_EXPIRES_IN),
+        expires_delta=expires_delta,
     )
 
+    datetime_expires_at = (
+        datetime.datetime.fromtimestamp(expires_at, datetime.timezone.utc)
+        if expires_at
+        else None
+    )
+
+    # Set the cookie token
+    response.set_cookie(
+        key="token",
+        value=token,
+        expires=datetime_expires_at,
+        httponly=True,  # Ensures the cookie is not accessible via JavaScript
+        samesite=WEBUI_SESSION_COOKIE_SAME_SITE,
+        secure=WEBUI_SESSION_COOKIE_SECURE,
+    )
+
     return {
         "token": token,
         "token_type": "Bearer",
+        "expires_at": expires_at,
         "id": user.id,
         "email": user.email,
         "name": user.name,
@@ -78,7 +99,7 @@ async def get_session_user(
 
 @router.post("/update/profile", response_model=UserResponse)
 async def update_profile(
-    form_data: UpdateProfileForm, session_user=Depends(get_current_user)
+    form_data: UpdateProfileForm, session_user=Depends(get_verified_user)
 ):
     if session_user:
         user = Users.update_user_by_id(
@@ -121,7 +142,7 @@ async def update_password(
 ############################
 
 
-@router.post("/signin", response_model=SigninResponse)
+@router.post("/signin", response_model=SessionUserResponse)
 async def signin(request: Request, response: Response, form_data: SigninForm):
     if WEBUI_AUTH_TRUSTED_EMAIL_HEADER:
         if WEBUI_AUTH_TRUSTED_EMAIL_HEADER not in request.headers:
@@ -163,21 +184,37 @@ async def signin(request: Request, response: Response, form_data: SigninForm):
         user = Auths.authenticate_user(form_data.email.lower(), form_data.password)
 
     if user:
-
+        expires_delta = parse_duration(request.app.state.config.JWT_EXPIRES_IN)
+        expires_at = None
+        if expires_delta:
+            expires_at = int(time.time()) + int(expires_delta.total_seconds())
+
         token = create_token(
             data={"id": user.id},
-            expires_delta=parse_duration(request.app.state.config.JWT_EXPIRES_IN),
+            expires_delta=expires_delta,
        )
 
+        datetime_expires_at = (
+            datetime.datetime.fromtimestamp(expires_at, datetime.timezone.utc)
+            if expires_at
+            else None
+        )
+
         # Set the cookie token
         response.set_cookie(
             key="token",
             value=token,
+            expires=datetime_expires_at,
             httponly=True,  # Ensures the cookie is not accessible via JavaScript
             samesite=WEBUI_SESSION_COOKIE_SAME_SITE,
             secure=WEBUI_SESSION_COOKIE_SECURE,
         )
 
         return {
             "token": token,
             "token_type": "Bearer",
+            "expires_at": expires_at,
             "id": user.id,
             "email": user.email,
             "name": user.name,
@@ -193,12 +230,21 @@ async def signin(request: Request, response: Response, form_data: SigninForm):
 ############################
 
 
-@router.post("/signup", response_model=SigninResponse)
+@router.post("/signup", response_model=SessionUserResponse)
 async def signup(request: Request, response: Response, form_data: SignupForm):
-    if not request.app.state.config.ENABLE_SIGNUP and WEBUI_AUTH:
-        raise HTTPException(
-            status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
-        )
+    if WEBUI_AUTH:
+        if (
+            not request.app.state.config.ENABLE_SIGNUP
+            or not request.app.state.config.ENABLE_LOGIN_FORM
+        ):
+            raise HTTPException(
+                status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
+            )
+    else:
+        if Users.get_num_users() != 0:
+            raise HTTPException(
+                status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
+            )
 
     if not validate_email_format(form_data.email.lower()):
         raise HTTPException(
@@ -224,17 +270,30 @@ async def signup(request: Request, response: Response, form_data: SignupForm):
         )
 
         if user:
+            expires_delta = parse_duration(request.app.state.config.JWT_EXPIRES_IN)
+            expires_at = None
+            if expires_delta:
+                expires_at = int(time.time()) + int(expires_delta.total_seconds())
+
             token = create_token(
                 data={"id": user.id},
-                expires_delta=parse_duration(request.app.state.config.JWT_EXPIRES_IN),
+                expires_delta=expires_delta,
             )
-            # response.set_cookie(key='token', value=token, httponly=True)
 
+            datetime_expires_at = (
+                datetime.datetime.fromtimestamp(expires_at, datetime.timezone.utc)
+                if expires_at
+                else None
+            )
+
+            # Set the cookie token
             response.set_cookie(
                 key="token",
                 value=token,
+                expires=datetime_expires_at,
                 httponly=True,  # Ensures the cookie is not accessible via JavaScript
                 samesite=WEBUI_SESSION_COOKIE_SAME_SITE,
                 secure=WEBUI_SESSION_COOKIE_SECURE,
             )
 
             if request.app.state.config.WEBHOOK_URL:
@@ -251,6 +310,7 @@ async def signup(request: Request, response: Response, form_data: SignupForm):
             return {
                 "token": token,
                 "token_type": "Bearer",
+                "expires_at": expires_at,
                 "id": user.id,
                 "email": user.email,
                 "name": user.name,
@@ -263,6 +323,12 @@ async def signup(request: Request, response: Response, form_data: SignupForm):
         raise HTTPException(500, detail=ERROR_MESSAGES.DEFAULT(err))
 
 
+@router.get("/signout")
+async def signout(response: Response):
+    response.delete_cookie("token")
+    return {"status": True}
+
+
 ############################
 # AddUser
 ############################
@@ -270,7 +336,6 @@ async def signup(request: Request, response: Response, form_data: SignupForm):
 
 @router.post("/add", response_model=SigninResponse)
 async def add_user(form_data: AddUserForm, user=Depends(get_admin_user)):
-
     if not validate_email_format(form_data.email.lower()):
         raise HTTPException(
             status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT
@@ -280,7 +345,6 @@ async def add_user(form_data: AddUserForm, user=Depends(get_admin_user)):
         raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
 
     try:
-
         print(form_data)
         hashed = get_password_hash(form_data.password)
         user = Auths.insert_new_auth(
@@ -352,6 +416,7 @@ async def get_admin_config(request: Request, user=Depends(get_admin_user)):
         "DEFAULT_USER_ROLE": request.app.state.config.DEFAULT_USER_ROLE,
         "JWT_EXPIRES_IN": request.app.state.config.JWT_EXPIRES_IN,
         "ENABLE_COMMUNITY_SHARING": request.app.state.config.ENABLE_COMMUNITY_SHARING,
+        "ENABLE_MESSAGE_RATING": request.app.state.config.ENABLE_MESSAGE_RATING,
     }
@@ -361,6 +426,7 @@ class AdminConfig(BaseModel):
     DEFAULT_USER_ROLE: str
     JWT_EXPIRES_IN: str
     ENABLE_COMMUNITY_SHARING: bool
+    ENABLE_MESSAGE_RATING: bool
 
 
 @router.post("/admin/config")
@@ -382,6 +448,7 @@ async def update_admin_config(
     request.app.state.config.ENABLE_COMMUNITY_SHARING = (
         form_data.ENABLE_COMMUNITY_SHARING
     )
+    request.app.state.config.ENABLE_MESSAGE_RATING = form_data.ENABLE_MESSAGE_RATING
 
     return {
         "SHOW_ADMIN_DETAILS": request.app.state.config.SHOW_ADMIN_DETAILS,
@@ -389,6 +456,7 @@ async def update_admin_config(
         "DEFAULT_USER_ROLE": request.app.state.config.DEFAULT_USER_ROLE,
         "JWT_EXPIRES_IN": request.app.state.config.JWT_EXPIRES_IN,
         "ENABLE_COMMUNITY_SHARING": request.app.state.config.ENABLE_COMMUNITY_SHARING,
+        "ENABLE_MESSAGE_RATING": request.app.state.config.ENABLE_MESSAGE_RATING,
     }
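The expiry logic repeated across get_session_user, signin, and signup converts the configured JWT_EXPIRES_IN duration into both an epoch timestamp (returned to the client) and a timezone-aware datetime (handed to the cookie). A standalone sketch of that arithmetic, assuming parse_duration yields a timedelta or None:

import time
import datetime
from datetime import timedelta
from typing import Optional


def expiry_fields(expires_delta: Optional[timedelta]):
    # Mirrors the handler logic: None means a non-expiring session cookie.
    expires_at = None
    if expires_delta:
        expires_at = int(time.time()) + int(expires_delta.total_seconds())
    datetime_expires_at = (
        datetime.datetime.fromtimestamp(expires_at, datetime.timezone.utc)
        if expires_at
        else None
    )
    return expires_at, datetime_expires_at


# e.g. a 7-day session:
epoch, dt = expiry_fields(timedelta(days=7))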
Some files were not shown because too many files have changed in this diff.