mirror of
https://github.com/oobabooga/text-generation-webui.git
synced 2026-03-20 12:24:38 +01:00
Compare commits
2564 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
256431f258 | ||
|
|
88a318894c | ||
|
|
44810751de | ||
|
|
6c05a964a7 | ||
|
|
737ded6959 | ||
|
|
50685c93f2 | ||
|
|
9d9f5d9860 | ||
|
|
5cfe9fe295 | ||
|
|
b76a289e04 | ||
|
|
c0de1d176c | ||
|
|
4f80b20859 | ||
|
|
f8ff7cf99e | ||
|
|
92d376e420 | ||
|
|
f6a749a151 | ||
|
|
1a2b840938 | ||
|
|
bfea49b197 | ||
|
|
80d0c03bab | ||
|
|
9119ce0680 | ||
|
|
5763cab3c4 | ||
|
|
f0c16813ef | ||
|
|
2d3a3794c9 | ||
|
|
9955e54a1f | ||
|
|
d1aba08561 | ||
|
|
c126530061 | ||
|
|
b9bdbd638e | ||
|
|
9eacd4a207 | ||
|
|
e11425d5f8 | ||
|
|
4ae2bd86e2 | ||
|
|
9f657d3976 | ||
|
|
c09a367c64 | ||
|
|
beab346f48 | ||
|
|
573617157a | ||
|
|
d0a4993cf4 | ||
|
|
c7953fb923 | ||
|
|
c908ac00d7 | ||
|
|
8bff331893 | ||
|
|
cb08ba63dc | ||
|
|
09a6549816 | ||
|
|
accb2ef661 | ||
|
|
998b9bfb2a | ||
|
|
5f1707af35 | ||
|
|
16636c04b8 | ||
|
|
e8d1c66303 | ||
|
|
cb88066d15 | ||
|
|
0cd245bcbb | ||
|
|
24e7e77b55 | ||
|
|
cabb95f0d6 | ||
|
|
5362bbb413 | ||
|
|
d4c22ced83 | ||
|
|
aab2596d29 | ||
|
|
e0a38da9f3 | ||
|
|
e50b823eee | ||
|
|
b7670cc762 | ||
|
|
d0b72c73c0 | ||
|
|
c39c187f47 | ||
|
|
4628825651 | ||
|
|
fef95b9e56 | ||
|
|
5833d94d7f | ||
|
|
a4bef860b6 | ||
|
|
5ddc1002d2 | ||
|
|
c094bc943c | ||
|
|
85ec85e569 | ||
|
|
04213dff14 | ||
|
|
24fdcc52b3 | ||
|
|
58f26a4cc7 | ||
|
|
0e35421593 | ||
|
|
1ed56aee85 | ||
|
|
286ae475f6 | ||
|
|
4c7a56c18d | ||
|
|
a09f21b9de | ||
|
|
1b7e6c5705 | ||
|
|
f8936ec47c | ||
|
|
5c02b7f603 | ||
|
|
09d5e049d6 | ||
|
|
fdd8e5b1fd | ||
|
|
4f82b71ef3 | ||
|
|
bbd43d9463 | ||
|
|
3e6bd1a310 | ||
|
|
9a7428b627 | ||
|
|
2d0cc7726e | ||
|
|
d45c9b3c59 | ||
|
|
2466305f76 | ||
|
|
a916fb0e5c | ||
|
|
fb1b3b6ddf | ||
|
|
5a017aa338 | ||
|
|
4b6c9db1c9 | ||
|
|
09723c9988 | ||
|
|
2549f7c33b | ||
|
|
b5cac2e3b2 | ||
|
|
0d62038710 | ||
|
|
cf9ad8eafe | ||
|
|
980a9d1657 | ||
|
|
bb00d96dc3 | ||
|
|
66c976e995 | ||
|
|
24977846fb | ||
|
|
7a63a56043 | ||
|
|
f1cfeae372 | ||
|
|
3304b57bdf | ||
|
|
8aeaa76365 | ||
|
|
6ec4ca8b10 | ||
|
|
307c085d1b | ||
|
|
c604ca66de | ||
|
|
15792c3cb8 | ||
|
|
3b71932658 | ||
|
|
83b7e47d77 | ||
|
|
7f485274eb | ||
|
|
39e6c997cc | ||
|
|
970055ca00 | ||
|
|
d6643bb4bc | ||
|
|
9753b2342b | ||
|
|
eb4a20137a | ||
|
|
634609acca | ||
|
|
40f1837b42 | ||
|
|
f6ffecfff2 | ||
|
|
5a91b8462f | ||
|
|
7a8ca9f2b0 | ||
|
|
7170a16b91 | ||
|
|
b3705d87bf | ||
|
|
0132966d09 | ||
|
|
baf4e13ff1 | ||
|
|
6ff111d18e | ||
|
|
aeeff41cc0 | ||
|
|
0cecc0a041 | ||
|
|
e1bf0b866f | ||
|
|
3b7cf44406 | ||
|
|
b686193fe2 | ||
|
|
328215b0c7 | ||
|
|
304510eb3d | ||
|
|
085c4ef5d7 | ||
|
|
aa634c77c0 | ||
|
|
abc699db9b | ||
|
|
f2fe001cc4 | ||
|
|
7ea5513263 | ||
|
|
5fa709a3f4 | ||
|
|
e8e0d02406 | ||
|
|
1eead661c3 | ||
|
|
d48b53422f | ||
|
|
2beaa4b971 | ||
|
|
5f6754c267 | ||
|
|
b8b4471ab5 | ||
|
|
d03923924a | ||
|
|
044566d42d | ||
|
|
f5acf55207 | ||
|
|
3531069824 | ||
|
|
160f7ad6b4 | ||
|
|
8e24a20873 | ||
|
|
3bab7fbfd4 | ||
|
|
e7e0df0101 | ||
|
|
3323dedd08 | ||
|
|
36dbc4ccce | ||
|
|
86d59b4404 | ||
|
|
0e0e3ceb97 | ||
|
|
6d7018069c | ||
|
|
f9ed8820de | ||
|
|
3880c1a406 | ||
|
|
93ebfa2b7e | ||
|
|
d0ac58ad31 | ||
|
|
f06583b2b9 | ||
|
|
8be444a559 | ||
|
|
1729fb07b9 | ||
|
|
eba262d47a | ||
|
|
521ddbb722 | ||
|
|
66fb79fe15 | ||
|
|
e81a47f708 | ||
|
|
27bcc45c18 | ||
|
|
8a9afcbec6 | ||
|
|
2e7e966ef2 | ||
|
|
ddcad3cc51 | ||
|
|
8d43123f73 | ||
|
|
e2548f69a9 | ||
|
|
4c406e024f | ||
|
|
249bd6eea2 | ||
|
|
f52d9336e5 | ||
|
|
9824c82cb6 | ||
|
|
2f08dce7b0 | ||
|
|
134ac8fc29 | ||
|
|
409db3df1e | ||
|
|
86d8291e58 | ||
|
|
33ff3773a0 | ||
|
|
7a1fa8c9ea | ||
|
|
275810c843 | ||
|
|
438e59498e | ||
|
|
63f28cb4a2 | ||
|
|
33a38d7ece | ||
|
|
c2e494963f | ||
|
|
5b18be8582 | ||
|
|
d337ba0390 | ||
|
|
5be68cc073 | ||
|
|
1ffe540c97 | ||
|
|
1c2548fd89 | ||
|
|
da2d4f1a6a | ||
|
|
d278bb46a2 | ||
|
|
b16a1a874a | ||
|
|
45188eccef | ||
|
|
268cc3f100 | ||
|
|
69fa4dd0b1 | ||
|
|
fbfcd59fe0 | ||
|
|
d45aa6606a | ||
|
|
0804296f4d | ||
|
|
6a08e79fa5 | ||
|
|
ff48956cb0 | ||
|
|
5a22970ba8 | ||
|
|
387cf9d8df | ||
|
|
942ff8fcb4 | ||
|
|
da3010c3ed | ||
|
|
83cc207ef7 | ||
|
|
2ac4eb33c8 | ||
|
|
7bf15ad933 | ||
|
|
1d1f4dfc88 | ||
|
|
abb7cc02e9 | ||
|
|
68109bc5da | ||
|
|
952e2c404a | ||
|
|
cdf0e392e6 | ||
|
|
eb90daf098 | ||
|
|
0ffb75de7c | ||
|
|
d8af0505a8 | ||
|
|
9b916f02cd | ||
|
|
5d93f4e800 | ||
|
|
64eb77e782 | ||
|
|
22141679e3 | ||
|
|
65de4c30c8 | ||
|
|
f010aa1612 | ||
|
|
f4d787ab8d | ||
|
|
8a3d866401 | ||
|
|
11dc6fdfce | ||
|
|
7d42b6900e | ||
|
|
8cbb7661a8 | ||
|
|
866c48e55b | ||
|
|
b3fd0d16e0 | ||
|
|
d584ede72e | ||
|
|
c0bff831e3 | ||
|
|
2260e530c9 | ||
|
|
e9f22813e4 | ||
|
|
3519890c8e | ||
|
|
9c604628a0 | ||
|
|
fbd2acfa19 | ||
|
|
5fd79b23d1 | ||
|
|
b8fcc8ea32 | ||
|
|
d7dd533b99 | ||
|
|
9576c5a5f4 | ||
|
|
9814d3d0ae | ||
|
|
38d0eeefc0 | ||
|
|
ddd74324fe | ||
|
|
efc72d5c32 | ||
|
|
aecbc5a8ac | ||
|
|
c54e8a2b3d | ||
|
|
dc2bbf1861 | ||
|
|
cae1fef42d | ||
|
|
7493fe7841 | ||
|
|
21b979c02a | ||
|
|
a731861127 | ||
|
|
910456ba31 | ||
|
|
d79cdc614c | ||
|
|
332fd40653 | ||
|
|
50a35b483c | ||
|
|
45fbec0320 | ||
|
|
b0968ed8b4 | ||
|
|
36747cf99c | ||
|
|
2fcbadec67 | ||
|
|
bb3b7bc197 | ||
|
|
6e2c4e9c23 | ||
|
|
a2ed640aa6 | ||
|
|
1066fe8c21 | ||
|
|
9530d3a6d8 | ||
|
|
a0b5599e9b | ||
|
|
09d88f91e8 | ||
|
|
34804f9354 | ||
|
|
6e8fb0e7b1 | ||
|
|
9fe40ff90f | ||
|
|
8e762e04b4 | ||
|
|
aa16266c38 | ||
|
|
85269d7fbb | ||
|
|
c4ebab9b29 | ||
|
|
bb004bacb1 | ||
|
|
502f59d39b | ||
|
|
4d94f66832 | ||
|
|
e7c8b51fec | ||
|
|
652d13c003 | ||
|
|
b758059e95 | ||
|
|
1ec9f708e5 | ||
|
|
3b8369a679 | ||
|
|
058e78411d | ||
|
|
17bd8d10f0 | ||
|
|
85f2df92e9 | ||
|
|
1762312fb4 | ||
|
|
160a25165a | ||
|
|
f93cc4b5c3 | ||
|
|
c026dbaf64 | ||
|
|
194e4c285f | ||
|
|
1c36559e2b | ||
|
|
02518a96a9 | ||
|
|
0100ad1bd7 | ||
|
|
6411142111 | ||
|
|
455dc06db0 | ||
|
|
1a9ed1fe98 | ||
|
|
17b12567d8 | ||
|
|
e20b2d38ff | ||
|
|
6ca99910ba | ||
|
|
11937de517 | ||
|
|
eba8a59466 | ||
|
|
5848c7884d | ||
|
|
c11c14590a | ||
|
|
0dd468245c | ||
|
|
b63d57158d | ||
|
|
afa29b9554 | ||
|
|
8eac99599a | ||
|
|
b4f06a50b0 | ||
|
|
15c6e43597 | ||
|
|
56f2a9512f | ||
|
|
3ef428efaa | ||
|
|
c7ad28a4cd | ||
|
|
b451bac082 | ||
|
|
47a0fcd614 | ||
|
|
ac31a7c008 | ||
|
|
a90739f498 | ||
|
|
ffef3c7b1d | ||
|
|
5763947c37 | ||
|
|
2793153717 | ||
|
|
7fb9f19bd8 | ||
|
|
a838223d18 | ||
|
|
14dbc3488e | ||
|
|
235b94f097 | ||
|
|
c357eed4c7 | ||
|
|
c93d27add3 | ||
|
|
fbca54957e | ||
|
|
49c60882bf | ||
|
|
59285d501d | ||
|
|
373baa5c9c | ||
|
|
906dc54969 | ||
|
|
4468c49439 | ||
|
|
5ad174fad2 | ||
|
|
5433ef3333 | ||
|
|
9448bf1caa | ||
|
|
97281ff831 | ||
|
|
9d07d3a229 | ||
|
|
6291e72129 | ||
|
|
b3666e140d | ||
|
|
a83821e941 | ||
|
|
24fd963c38 | ||
|
|
e24ba92ef2 | ||
|
|
bd9f2de73a | ||
|
|
661e42d2b7 | ||
|
|
5327bc9397 | ||
|
|
78b315344a | ||
|
|
3cad0cd4c1 | ||
|
|
400bb0694b | ||
|
|
8f0048663d | ||
|
|
b0baf7518b | ||
|
|
1afe0827ba | ||
|
|
0d4eff284c | ||
|
|
d6f39e1fef | ||
|
|
327a234d23 | ||
|
|
4e4abd0841 | ||
|
|
c45f35ccc2 | ||
|
|
d85b95bb15 | ||
|
|
4a36b7be5b | ||
|
|
3d7e9856a2 | ||
|
|
a26e28bdea | ||
|
|
6a3bf1de92 | ||
|
|
9ad9afad7d | ||
|
|
e7534a90d8 | ||
|
|
6be1bfcc87 | ||
|
|
92d9cd36a6 | ||
|
|
67f9288891 | ||
|
|
16f77b74c4 | ||
|
|
cd645f80f8 | ||
|
|
6871484398 | ||
|
|
338ae36f73 | ||
|
|
c8cd840b24 | ||
|
|
fc67e5e692 | ||
|
|
f4c9e67155 | ||
|
|
9a84a828fc | ||
|
|
138cc654c4 | ||
|
|
24fd2b4dec | ||
|
|
be81f050a7 | ||
|
|
9476123ee6 | ||
|
|
0d85744205 | ||
|
|
771130532c | ||
|
|
a156ebbf76 | ||
|
|
c871d9cdbd | ||
|
|
163d863443 | ||
|
|
c93d567f97 | ||
|
|
b5a6904c4a | ||
|
|
efaf2aef3d | ||
|
|
047855c591 | ||
|
|
308e726e11 | ||
|
|
611399e089 | ||
|
|
968c79db06 | ||
|
|
655c3e86e3 | ||
|
|
c7dd920dc8 | ||
|
|
1831b3fb51 | ||
|
|
dd0b003493 | ||
|
|
a74596374d | ||
|
|
78ff21d512 | ||
|
|
5d734cc7ca | ||
|
|
25360387ec | ||
|
|
7833650aa1 | ||
|
|
bf5d85c922 | ||
|
|
0d03813e98 | ||
|
|
748007f6ee | ||
|
|
af3c70651c | ||
|
|
977ffbaa04 | ||
|
|
e0f0fae59d | ||
|
|
deb37b821b | ||
|
|
7f06aec3a1 | ||
|
|
218dc01b51 | ||
|
|
1aa2b924d2 | ||
|
|
0f3793d608 | ||
|
|
282aa19189 | ||
|
|
93aa7b3ed3 | ||
|
|
d229dfe991 | ||
|
|
292c91abbb | ||
|
|
f660e0836b | ||
|
|
898a3ed2fe | ||
|
|
22997c134e | ||
|
|
38a7fd685d | ||
|
|
64829071e0 | ||
|
|
0eb8543d74 | ||
|
|
b7effb22e0 | ||
|
|
042b828c73 | ||
|
|
8c9df34696 | ||
|
|
1e863a7113 | ||
|
|
005fcf3f98 | ||
|
|
e4412f0634 | ||
|
|
dd6d2223a5 | ||
|
|
9e9ab39892 | ||
|
|
9c0a833a0a | ||
|
|
8087a57fd8 | ||
|
|
7131a478b9 | ||
|
|
fe15b67160 | ||
|
|
8f731a566c | ||
|
|
483927a5be | ||
|
|
557b78d31e | ||
|
|
ba62783b72 | ||
|
|
d3a7710c62 | ||
|
|
f3829b268a | ||
|
|
2395c647d4 | ||
|
|
c6ea67bbdb | ||
|
|
00ed878b05 | ||
|
|
d843afcf66 | ||
|
|
00ebb295d3 | ||
|
|
387e249dec | ||
|
|
8028d88541 | ||
|
|
13876a1ee8 | ||
|
|
7b80e9a2ad | ||
|
|
5631d4e3d6 | ||
|
|
5920ad8834 | ||
|
|
21d790f87e | ||
|
|
3a3e247f3c | ||
|
|
cf1aad2a68 | ||
|
|
96136ea760 | ||
|
|
a3eb67e466 | ||
|
|
08f90f4b64 | ||
|
|
07a2e226c1 | ||
|
|
a2b37adb26 | ||
|
|
084675cf75 | ||
|
|
d78b7d0fad | ||
|
|
fc2eb48664 | ||
|
|
2720955478 | ||
|
|
d9eec31886 | ||
|
|
cb8780a4ce | ||
|
|
cfc83745ec | ||
|
|
a336a8bbeb | ||
|
|
ba6041251d | ||
|
|
a92758a144 | ||
|
|
030ba7bfeb | ||
|
|
0b4518e61c | ||
|
|
73442a2b6d | ||
|
|
8042f76399 | ||
|
|
ccc8a2229d | ||
|
|
750adf793d | ||
|
|
02ca96fa44 | ||
|
|
6a7166fffa | ||
|
|
8fcb4b3102 | ||
|
|
8f660aefe3 | ||
|
|
a531328f7e | ||
|
|
6c165d2e55 | ||
|
|
b657be7381 | ||
|
|
ded6c41cf8 | ||
|
|
c1aa4590ea | ||
|
|
b330ec3517 | ||
|
|
3ad5970374 | ||
|
|
adeca8a658 | ||
|
|
aad0104c1b | ||
|
|
f919cdf881 | ||
|
|
d08800c359 | ||
|
|
3bc48014a5 | ||
|
|
1f77427088 | ||
|
|
2478294c06 | ||
|
|
8be798e15f | ||
|
|
7fe8da8944 | ||
|
|
f247c2ae62 | ||
|
|
fd41f2fafc | ||
|
|
cb00db15c9 | ||
|
|
9e7b326e34 | ||
|
|
1972479610 | ||
|
|
e0f5905a97 | ||
|
|
5b06284a8a | ||
|
|
cbba58bef9 | ||
|
|
8805a50d24 | ||
|
|
7d23a55901 | ||
|
|
08594e5263 | ||
|
|
15f99b1b71 | ||
|
|
6b1b2e2373 | ||
|
|
8a14aa62ff | ||
|
|
8cdb911a6e | ||
|
|
6bf31479d9 | ||
|
|
320f7339cd | ||
|
|
3dec47eaf8 | ||
|
|
35707c2dd8 | ||
|
|
58797a9eb5 | ||
|
|
64eba9576c | ||
|
|
3a91ca2dd1 | ||
|
|
9651b5c873 | ||
|
|
a633793a00 | ||
|
|
dbabe67e77 | ||
|
|
d771ca4a13 | ||
|
|
73a8a737b2 | ||
|
|
57f6e9af5a | ||
|
|
45e2935e87 | ||
|
|
725a8bcf60 | ||
|
|
331eab81f7 | ||
|
|
8c9a7e1334 | ||
|
|
bd05fb899e | ||
|
|
6c2fdfdbda | ||
|
|
41b95e9ec3 | ||
|
|
2f979ce294 | ||
|
|
7301452b41 | ||
|
|
8d7b88106a | ||
|
|
2f6a629393 | ||
|
|
2238302b49 | ||
|
|
0882970a94 | ||
|
|
d8fcc71616 | ||
|
|
e6447cd24a | ||
|
|
c47e6deda2 | ||
|
|
0e3def449a | ||
|
|
0e88a621fd | ||
|
|
1e3c4e8bdb | ||
|
|
765af1ba17 | ||
|
|
a78ca6ffcd | ||
|
|
dfd9c60d80 | ||
|
|
999471256c | ||
|
|
1ba1211ca0 | ||
|
|
b10d525bf7 | ||
|
|
b62c8845f3 | ||
|
|
38c0b4a1ad | ||
|
|
52d1cbbbe9 | ||
|
|
1cb800d392 | ||
|
|
4809ddfeb8 | ||
|
|
4d8dbbab64 | ||
|
|
c5340533c0 | ||
|
|
9ec310d858 | ||
|
|
cc964ee579 | ||
|
|
6fbf162d71 | ||
|
|
1fb5807859 | ||
|
|
0ea62d88f6 | ||
|
|
4663b1a56e | ||
|
|
2f90ac9880 | ||
|
|
c6b4d1e87f | ||
|
|
d86b0ec010 | ||
|
|
eb16f64017 | ||
|
|
a289a92b94 | ||
|
|
d489eb589a | ||
|
|
a6d6bee88c | ||
|
|
2fe79a93cc | ||
|
|
59c6138e98 | ||
|
|
f396b82a4f | ||
|
|
fa9be444fa | ||
|
|
d9db8f63a7 | ||
|
|
3f5ec9644f | ||
|
|
1168004067 | ||
|
|
9e260332cc | ||
|
|
544c3a7c9f | ||
|
|
8fcadff8d3 | ||
|
|
6e9de75727 | ||
|
|
88127f46c1 | ||
|
|
b391ac8eb1 | ||
|
|
88ba4b1ebf | ||
|
|
f1147c9926 | ||
|
|
3e24f455c8 | ||
|
|
0c1403f2c7 | ||
|
|
6ce4b353c4 | ||
|
|
fefdb20f69 | ||
|
|
7c82d65a9d | ||
|
|
fbea21a1f1 | ||
|
|
bfbbfc2361 | ||
|
|
20adc3c967 | ||
|
|
80f6abb07e | ||
|
|
e5b8d4d072 | ||
|
|
701048cf33 | ||
|
|
7d98ca6195 | ||
|
|
0e42575c57 | ||
|
|
498778b8ac | ||
|
|
6bb8212731 | ||
|
|
42e3a7a5ae | ||
|
|
5c5a4dfc14 | ||
|
|
ecd16d6bf9 | ||
|
|
178c3e75cc | ||
|
|
9f28f53cfc | ||
|
|
3b28dc1821 | ||
|
|
3039aeffeb | ||
|
|
5989043537 | ||
|
|
02a3420a50 | ||
|
|
74230f559a | ||
|
|
f08bb9a201 | ||
|
|
d746484521 | ||
|
|
714f745713 | ||
|
|
0c667de7a7 | ||
|
|
ccf5e3e3a7 | ||
|
|
a00983b2ba | ||
|
|
9371867238 | ||
|
|
03fb85e49a | ||
|
|
845432b9b4 | ||
|
|
1d1b20bd77 | ||
|
|
5a8a9c22e8 | ||
|
|
273888f218 | ||
|
|
caf69d871a | ||
|
|
188c7c8f2b | ||
|
|
635e6efd18 | ||
|
|
0f3a88057c | ||
|
|
e523f25b9f | ||
|
|
a7a3a0c700 | ||
|
|
21e0e9f32b | ||
|
|
d1f4622a96 | ||
|
|
e015355e4a | ||
|
|
bd4881c4dc | ||
|
|
b69f435311 | ||
|
|
8b3c7aa795 | ||
|
|
f045b72826 | ||
|
|
c357601c01 | ||
|
|
6c2bdda0f0 | ||
|
|
6338dc0051 | ||
|
|
511bb31646 | ||
|
|
d1e9301a43 | ||
|
|
cd5d867b62 | ||
|
|
3e24a127c7 | ||
|
|
2f544fe199 | ||
|
|
93e08c0d4a | ||
|
|
42191a36ab | ||
|
|
c6c1b725e9 | ||
|
|
86cb5e0587 | ||
|
|
b7d5982944 | ||
|
|
e8266b0356 | ||
|
|
e1034fc79e | ||
|
|
74d98186fc | ||
|
|
ca226a54c6 | ||
|
|
07e6f004c5 | ||
|
|
426e7a4cec | ||
|
|
e52bc0acb2 | ||
|
|
cbef2720ce | ||
|
|
e5767d4fc5 | ||
|
|
60123a67ac | ||
|
|
e6bc7742fb | ||
|
|
959d4ddb91 | ||
|
|
8df1127ce2 | ||
|
|
de4ccffff8 | ||
|
|
0f258774d3 | ||
|
|
4583924ce7 | ||
|
|
c4d738f39f | ||
|
|
c4d5331c03 | ||
|
|
92ec8dda03 | ||
|
|
23bb94a5fb | ||
|
|
877c651c04 | ||
|
|
cbba88f565 | ||
|
|
13373391df | ||
|
|
ab162f976c | ||
|
|
9a58964834 | ||
|
|
c3faecfd27 | ||
|
|
1b19dd77a4 | ||
|
|
02f604479d | ||
|
|
58282f7107 | ||
|
|
bb97ca1b22 | ||
|
|
f154aeafea | ||
|
|
17f9c188bd | ||
|
|
acd57b6a85 | ||
|
|
f08db63fbc | ||
|
|
2517ea9c9e | ||
|
|
90f42f311a | ||
|
|
ee945517ff | ||
|
|
a1b606a6ac | ||
|
|
3344510553 | ||
|
|
645463b9f0 | ||
|
|
09cd1cb4e2 | ||
|
|
c4029914e8 | ||
|
|
84617abdeb | ||
|
|
93cd47c948 | ||
|
|
dcdc42fa06 | ||
|
|
9c6913ad61 | ||
|
|
9bd114b5d7 | ||
|
|
76a722dc90 | ||
|
|
4e0dfbdde3 | ||
|
|
92547becff | ||
|
|
0cb82483ef | ||
|
|
e33921a629 | ||
|
|
6af3598cfa | ||
|
|
0b26650f47 | ||
|
|
6cc7bbf009 | ||
|
|
197b327374 | ||
|
|
2f45d75309 | ||
|
|
7cb2b1bfdb | ||
|
|
8b7eb5c87c | ||
|
|
22cc9e0115 | ||
|
|
678f40297b | ||
|
|
a2cdd06afc | ||
|
|
2d37602382 | ||
|
|
da148232eb | ||
|
|
fc23345c6d | ||
|
|
75217d3713 | ||
|
|
b4edfce993 | ||
|
|
01ef4c61bd | ||
|
|
315e06f695 | ||
|
|
73138a29fa | ||
|
|
87ae09ecd6 | ||
|
|
aa44e542cb | ||
|
|
0d1597616f | ||
|
|
8689d7ecea | ||
|
|
8f49e6144e | ||
|
|
66e991841a | ||
|
|
be3d371290 | ||
|
|
26eda537f0 | ||
|
|
88c0204357 | ||
|
|
97a539cab6 | ||
|
|
faae4dc1b0 | ||
|
|
d0befe0729 | ||
|
|
de24b3bb31 | ||
|
|
db67d69ddc | ||
|
|
cac225b589 | ||
|
|
7ba3d4425f | ||
|
|
34bf93ef47 | ||
|
|
c9c3b716fb | ||
|
|
f77f1504f5 | ||
|
|
949b7ec9cf | ||
|
|
d347b056e3 | ||
|
|
9bcef8a648 | ||
|
|
bc2b0f54e9 | ||
|
|
cc757f6226 | ||
|
|
b279460a81 | ||
|
|
e8dc7b0ee9 | ||
|
|
4fc254c1dd | ||
|
|
609c3ac893 | ||
|
|
db7d717df7 | ||
|
|
e263dbf852 | ||
|
|
09606a38d3 | ||
|
|
ad0be25c46 | ||
|
|
7c0225931a | ||
|
|
1c1cf09a59 | ||
|
|
58c3b549ba | ||
|
|
8e9c0287aa | ||
|
|
8e0ef5b419 | ||
|
|
1d23159837 | ||
|
|
d2da40b0e4 | ||
|
|
879fa3d8c4 | ||
|
|
09eb326486 | ||
|
|
dfab11f0b5 | ||
|
|
9a2353f97b | ||
|
|
322cd28e24 | ||
|
|
7cb650237c | ||
|
|
aab28398ef | ||
|
|
5ba52967ac | ||
|
|
b58e80cb99 | ||
|
|
f4f621b215 | ||
|
|
f337767f36 | ||
|
|
a25a1fc8d0 | ||
|
|
2dee3a66ff | ||
|
|
2cfb77d16f | ||
|
|
b4d2a00e20 | ||
|
|
9ff5961853 | ||
|
|
9d6a7f1bcf | ||
|
|
004fd8316c | ||
|
|
570d5b8936 | ||
|
|
27140f3563 | ||
|
|
2ebc8ff252 | ||
|
|
13a5288d01 | ||
|
|
801db438b0 | ||
|
|
00fbbd6f57 | ||
|
|
e8041069e2 | ||
|
|
fe0685a742 | ||
|
|
036976aeb8 | ||
|
|
43fc170224 | ||
|
|
e9a433832e | ||
|
|
a86a5a026e | ||
|
|
1e96dcf369 | ||
|
|
552cb09f09 | ||
|
|
bc921c66e5 | ||
|
|
4cf39120fc | ||
|
|
75da90190f | ||
|
|
1c1fd3be46 | ||
|
|
3f9eb3aad1 | ||
|
|
18bd78f1f0 | ||
|
|
889153952f | ||
|
|
2dabdbc7da | ||
|
|
c92eba0b0a | ||
|
|
efd9c9707b | ||
|
|
df98f4b331 | ||
|
|
ec73121020 | ||
|
|
331d03c33f | ||
|
|
1443612e72 | ||
|
|
d085dc6a93 | ||
|
|
263b5d5557 | ||
|
|
f5a5d0c0cb | ||
|
|
747a4a0e56 | ||
|
|
14efd42084 | ||
|
|
1602ac1c8f | ||
|
|
eefbf96f6a | ||
|
|
80637cae28 | ||
|
|
f9a007c6a8 | ||
|
|
f3388c2ab4 | ||
|
|
4a369e070a | ||
|
|
0b8d2d65a2 | ||
|
|
06dfb7e772 | ||
|
|
b5e021fc49 | ||
|
|
e976a5ddc7 | ||
|
|
7ed1926ce7 | ||
|
|
ff01bcb870 | ||
|
|
f81b1540ca | ||
|
|
eb0ab9db1d | ||
|
|
78899244d5 | ||
|
|
1f1435997a | ||
|
|
84f66484c5 | ||
|
|
42e7864d62 | ||
|
|
af6bb7513a | ||
|
|
1cab149c1a | ||
|
|
ae150fa24f | ||
|
|
1bdf11b511 | ||
|
|
0dbc4cbc71 | ||
|
|
fe955cac1f | ||
|
|
caf9fca5f3 | ||
|
|
3650a6fd1f | ||
|
|
6436bf1920 | ||
|
|
35ed55d18f | ||
|
|
db847eed4c | ||
|
|
2d263f227d | ||
|
|
379dd01ca7 | ||
|
|
f8f23b5489 | ||
|
|
45f823ddf6 | ||
|
|
d47c8eb956 | ||
|
|
977ec801b7 | ||
|
|
3829507d0f | ||
|
|
3d676cd50f | ||
|
|
66a75c899a | ||
|
|
9bd7359ffa | ||
|
|
93b3752cdf | ||
|
|
b38ec0ec38 | ||
|
|
b30a73016d | ||
|
|
7278548cd1 | ||
|
|
bb409c926e | ||
|
|
45c9ae312c | ||
|
|
2db7745cbd | ||
|
|
ad6d0218ae | ||
|
|
92adceb7b5 | ||
|
|
7a81beb0c1 | ||
|
|
bf42b2c3a1 | ||
|
|
83849336d8 | ||
|
|
3e3746283c | ||
|
|
88ff3e6ad8 | ||
|
|
9e80193008 | ||
|
|
0816ecedb7 | ||
|
|
98a7508a99 | ||
|
|
85f2f01a3a | ||
|
|
f8d220c1e6 | ||
|
|
4a2727b71d | ||
|
|
1d88456659 | ||
|
|
dc8ed6dbe7 | ||
|
|
c55d3c61c6 | ||
|
|
ae61c1a0f4 | ||
|
|
15f466ca3f | ||
|
|
219f0a7731 | ||
|
|
298d4719c6 | ||
|
|
7c29879e79 | ||
|
|
af1eef1b08 | ||
|
|
28e6bd4fcd | ||
|
|
d1bfb08e8d | ||
|
|
acbcc12e7b | ||
|
|
dce02732a4 | ||
|
|
8078c41ec6 | ||
|
|
a45a652130 | ||
|
|
f59998d268 | ||
|
|
aff41f3482 | ||
|
|
e7129f9dbe | ||
|
|
724147ffab | ||
|
|
faa5c82c64 | ||
|
|
3f37a2e915 | ||
|
|
c970c5f166 | ||
|
|
81794692ab | ||
|
|
36bc276005 | ||
|
|
0986d075fb | ||
|
|
9a94d7b4f6 | ||
|
|
2a9699033d | ||
|
|
f2ee917d4f | ||
|
|
685cfe2540 | ||
|
|
a8d02dec8f | ||
|
|
63234b9b6f | ||
|
|
75d6cfd14d | ||
|
|
7080a02252 | ||
|
|
3eb0b77427 | ||
|
|
27641ac182 | ||
|
|
6c3590ba9a | ||
|
|
0aedb89921 | ||
|
|
75c6ae8502 | ||
|
|
077bbc6b10 | ||
|
|
1b0e2d8750 | ||
|
|
f6ca0ee072 | ||
|
|
2db36da979 | ||
|
|
5028480eba | ||
|
|
355b5f6c8b | ||
|
|
cc9b7253c1 | ||
|
|
8531100109 | ||
|
|
73bfc936a0 | ||
|
|
bae1aa34aa | ||
|
|
7f6579ab20 | ||
|
|
0d3f854778 | ||
|
|
8620d6ffe7 | ||
|
|
cc8a4fdcb1 | ||
|
|
409a48d6bd | ||
|
|
5d00574a56 | ||
|
|
51c50b265d | ||
|
|
616ea6966d | ||
|
|
c25a381540 | ||
|
|
8e10f9894a | ||
|
|
9ec46b8c44 | ||
|
|
0c7237e4b7 | ||
|
|
bad1da99db | ||
|
|
0c1bc6d1d0 | ||
|
|
9cd6ea6c0b | ||
|
|
83bfd5c64b | ||
|
|
126b3a768f | ||
|
|
9d7a36356d | ||
|
|
2faaf18f1f | ||
|
|
f1ec6c8662 | ||
|
|
bd13a8f255 | ||
|
|
076aa67963 | ||
|
|
366de4b561 | ||
|
|
e8595730b4 | ||
|
|
61276f6a37 | ||
|
|
4800d1d522 | ||
|
|
052c82b664 | ||
|
|
0f77ff9670 | ||
|
|
17c29fa0a2 | ||
|
|
4bf763e1d9 | ||
|
|
c0e295dd1d | ||
|
|
e3bba510d4 | ||
|
|
71fa046c17 | ||
|
|
d99fb0a22a | ||
|
|
1c549d176b | ||
|
|
dc3094549e | ||
|
|
e4d3f4449d | ||
|
|
470c822f44 | ||
|
|
adb975a380 | ||
|
|
fc483650b5 | ||
|
|
38c50087fe | ||
|
|
253e85a519 | ||
|
|
9ec9b1bf83 | ||
|
|
ee7b3028ac | ||
|
|
4925c307cf | ||
|
|
93e1850a2c | ||
|
|
cbf4daf1c8 | ||
|
|
fd61297933 | ||
|
|
8cb73b78e1 | ||
|
|
041248cc9f | ||
|
|
5534d01da0 | ||
|
|
c4a715fd1e | ||
|
|
035cd3e2a9 | ||
|
|
2826c60044 | ||
|
|
3fa1a899ae | ||
|
|
c375b69413 | ||
|
|
62c774bf24 | ||
|
|
e7ac06c169 | ||
|
|
0c5fa3728e | ||
|
|
006a866079 | ||
|
|
47d4758509 | ||
|
|
4920981b14 | ||
|
|
8984e95c67 | ||
|
|
2bde625d57 | ||
|
|
512bc2d0e0 | ||
|
|
f8ef6e09af | ||
|
|
bf7e4a4597 | ||
|
|
9ea2a69210 | ||
|
|
3bc2ec2b11 | ||
|
|
1c7209a725 | ||
|
|
a1b3307b66 | ||
|
|
fa960496d5 | ||
|
|
ed6e16191d | ||
|
|
13a434f351 | ||
|
|
a2ab42d390 | ||
|
|
348d4860c2 | ||
|
|
d2bae7694c | ||
|
|
b28fa86db6 | ||
|
|
760b4dd115 | ||
|
|
e4fb2475d2 | ||
|
|
5ef564a22e | ||
|
|
c4f36db0d8 | ||
|
|
05115e42ee | ||
|
|
1927afe894 | ||
|
|
605cc9ab14 | ||
|
|
89590adc14 | ||
|
|
d1c0154d66 | ||
|
|
cbef35054c | ||
|
|
4e8f628d3c | ||
|
|
530223bf0b | ||
|
|
76f947e3cf | ||
|
|
99bd66445f | ||
|
|
987505ead3 | ||
|
|
941e0663da | ||
|
|
f82667f0b4 | ||
|
|
85bf2e15b9 | ||
|
|
8137eb8ef4 | ||
|
|
53d8e46502 | ||
|
|
bf5290bc0f | ||
|
|
967b70327e | ||
|
|
6001d279c6 | ||
|
|
475e012ee8 | ||
|
|
b817bb33fd | ||
|
|
f3da45f65d | ||
|
|
df7bb0db1f | ||
|
|
d0211afb3c | ||
|
|
2da197bba4 | ||
|
|
690d693913 | ||
|
|
d9da16edba | ||
|
|
84ab1f95be | ||
|
|
d186621926 | ||
|
|
7853fb1c8d | ||
|
|
b7a5c7db8d | ||
|
|
5f5569e9ac | ||
|
|
4c2e3b168b | ||
|
|
ea60f14674 | ||
|
|
b71ef50e9d | ||
|
|
b21bd8bb1e | ||
|
|
d08acb4af9 | ||
|
|
3526b7923c | ||
|
|
4cea720da8 | ||
|
|
905afced1c | ||
|
|
3f26b0408b | ||
|
|
9e3867dc83 | ||
|
|
d5c407cf35 | ||
|
|
f8aaf3c23a | ||
|
|
c12a53c998 | ||
|
|
ace8afb825 | ||
|
|
89090d9a61 | ||
|
|
a41da1ec95 | ||
|
|
b950a0c6db | ||
|
|
307d13b540 | ||
|
|
55283bb8f1 | ||
|
|
ec2e641749 | ||
|
|
a6c3ec2299 | ||
|
|
195a45c6e1 | ||
|
|
cd5c32dc19 | ||
|
|
b46ca01340 | ||
|
|
a4bf339724 | ||
|
|
e9569c3984 | ||
|
|
771d3d8ed6 | ||
|
|
7f49e3c3ce | ||
|
|
c5fe92d152 | ||
|
|
1dd4aedbe1 | ||
|
|
c5fb51e5d1 | ||
|
|
d10bded7f8 | ||
|
|
1ee0acc852 | ||
|
|
15a29e99f8 | ||
|
|
be13f5199b | ||
|
|
c6c2855c80 | ||
|
|
bbcaec75b4 | ||
|
|
ee0592473c | ||
|
|
6e6f9971a2 | ||
|
|
965ca7948f | ||
|
|
1180bb0d80 | ||
|
|
f5b59d2b0b | ||
|
|
9bb9ce079e | ||
|
|
765fea5e36 | ||
|
|
70952553c7 | ||
|
|
363b632a0d | ||
|
|
fa861de05b | ||
|
|
7b80acd524 | ||
|
|
943451284f | ||
|
|
511eb6aa94 | ||
|
|
8b83e6f843 | ||
|
|
4a32e1f80c | ||
|
|
0fe3b033d0 | ||
|
|
c4afc0421d | ||
|
|
234aba1c50 | ||
|
|
4ff91b6588 | ||
|
|
bf2aa19b21 | ||
|
|
029aab6404 | ||
|
|
35717a088c | ||
|
|
bc55feaf3e | ||
|
|
a317450dfa | ||
|
|
d1e7d9c5d5 | ||
|
|
3a207e7a57 | ||
|
|
6acb0e1bee | ||
|
|
cbd4d967cc | ||
|
|
19c8dced67 | ||
|
|
b976112539 | ||
|
|
763a7011c0 | ||
|
|
d9de14d1f7 | ||
|
|
d4017fbb6d | ||
|
|
d4b1e31c49 | ||
|
|
faababc4ea | ||
|
|
877cf44c08 | ||
|
|
d35818f4e1 | ||
|
|
0dd71e78c9 | ||
|
|
98f4c694b9 | ||
|
|
b6fffbd216 | ||
|
|
2c7ff86015 | ||
|
|
5993ebeb1b | ||
|
|
23399aff3c | ||
|
|
5861013e68 | ||
|
|
a90df27ff5 | ||
|
|
ae1fe87365 | ||
|
|
8f2493cc60 | ||
|
|
370fe7b7cf | ||
|
|
8ebe868916 | ||
|
|
93fd4ad25d | ||
|
|
f1b64df8dd | ||
|
|
60ac495d59 | ||
|
|
b313adf653 | ||
|
|
c71a2af5ab | ||
|
|
bfbde73409 | ||
|
|
e99c20bcb0 | ||
|
|
9424ba17c8 | ||
|
|
1aa76b3beb | ||
|
|
bce1b68ca9 | ||
|
|
812d878812 | ||
|
|
1df2b0d3ae | ||
|
|
8228822a6c | ||
|
|
62455b415c | ||
|
|
765de6f678 | ||
|
|
89ec4c9ba6 | ||
|
|
06619e5f03 | ||
|
|
022664f2bd | ||
|
|
4335a24ff8 | ||
|
|
a778270536 | ||
|
|
25cf3600aa | ||
|
|
39cbb5fee0 | ||
|
|
da1919baae | ||
|
|
a3031795a3 | ||
|
|
008c6dd682 | ||
|
|
ee09e44c85 | ||
|
|
78aeabca89 | ||
|
|
8320190184 | ||
|
|
15989c2ed8 | ||
|
|
86c3ed3218 | ||
|
|
c178ea02fe | ||
|
|
6117ef7d64 | ||
|
|
fe8e80e04a | ||
|
|
ff1c00bdd9 | ||
|
|
d3e7c655e5 | ||
|
|
99588be576 | ||
|
|
e243424ba1 | ||
|
|
8cfd7f976b | ||
|
|
d5e1bccef9 | ||
|
|
b3bf7a885d | ||
|
|
9c59acf820 | ||
|
|
ae02ffc605 | ||
|
|
c19b995b8e | ||
|
|
6ba0164c70 | ||
|
|
5ab069786b | ||
|
|
b9da5c7e3a | ||
|
|
9c9df2063f | ||
|
|
ba976d1390 | ||
|
|
ed42154c78 | ||
|
|
5fdebc554b | ||
|
|
b1495d52e5 | ||
|
|
6589ebeca8 | ||
|
|
e2e73ed22f | ||
|
|
e2e90af6cd | ||
|
|
44a6d8a761 | ||
|
|
9f07a1f5d7 | ||
|
|
f727b4a2cc | ||
|
|
b3342b8dd8 | ||
|
|
4fa52a1302 | ||
|
|
2002590536 | ||
|
|
71ae05e0a4 | ||
|
|
5ad080ff25 | ||
|
|
4fabd729c9 | ||
|
|
5135523429 | ||
|
|
4eecb6611f | ||
|
|
8d481ef9d5 | ||
|
|
caa6afc88b | ||
|
|
c5e54c0b37 | ||
|
|
e52f62d3ff | ||
|
|
85c4486d4a | ||
|
|
d00d713ace | ||
|
|
c1cc65e82e | ||
|
|
d68f0fbdf7 | ||
|
|
a0abf93425 | ||
|
|
ef9910c767 | ||
|
|
1c4a2c9a71 | ||
|
|
03544d4fb6 | ||
|
|
c6901aba9f | ||
|
|
170ad3d3ec | ||
|
|
8144e1031e | ||
|
|
ae54d8faaa | ||
|
|
5c2f8d828e | ||
|
|
2fc58ad935 | ||
|
|
d78abe480b | ||
|
|
ce9e2d94b1 | ||
|
|
5dfab7d363 | ||
|
|
c9b3c9dfbf | ||
|
|
2c2d453c8c | ||
|
|
0ef1b8f8b4 | ||
|
|
38dc09dca5 | ||
|
|
038a012581 | ||
|
|
682c78ea42 | ||
|
|
454366f93e | ||
|
|
d7b336d37e | ||
|
|
4ed0da74a8 | ||
|
|
598568b1ed | ||
|
|
297a406e05 | ||
|
|
6ea0206207 | ||
|
|
14e6baeb48 | ||
|
|
9025848df5 | ||
|
|
d337ea31fa | ||
|
|
8229736ec4 | ||
|
|
89f40cdcf7 | ||
|
|
ad1ada6574 | ||
|
|
d8aad6da94 | ||
|
|
8b8d39ec4e | ||
|
|
0b3503c91f | ||
|
|
649ee729c1 | ||
|
|
bf48ec8c44 | ||
|
|
a5855c345c | ||
|
|
f1f32386b4 | ||
|
|
204db28362 | ||
|
|
eef90a4964 | ||
|
|
a8a64b6c1c | ||
|
|
c010cea7be | ||
|
|
cbffcf67ef | ||
|
|
77a73cc561 | ||
|
|
109de34e3b | ||
|
|
bb1905ebc5 | ||
|
|
1981327285 | ||
|
|
79a26d7a5c | ||
|
|
1bd208c219 | ||
|
|
9b80d1d6c2 | ||
|
|
525b1e0207 | ||
|
|
2bfaf44df0 | ||
|
|
01e42a00ff | ||
|
|
80cdbe4e09 | ||
|
|
758c3f15a5 | ||
|
|
60d67994d9 | ||
|
|
5bcd2d7ad0 | ||
|
|
677d74a6a0 | ||
|
|
6ab04698f6 | ||
|
|
26317a4c7e | ||
|
|
f04a37adc2 | ||
|
|
0261338910 | ||
|
|
39fded487a | ||
|
|
a12e05d9c0 | ||
|
|
769eee1ff3 | ||
|
|
16fa9215c4 | ||
|
|
b131f86584 | ||
|
|
01f20d2d9f | ||
|
|
12f6f7ba9f | ||
|
|
dba17c40fc | ||
|
|
16f4f1a1c3 | ||
|
|
7c883ef2f0 | ||
|
|
cf9676c4d5 | ||
|
|
b54bf359bf | ||
|
|
edbe0af647 | ||
|
|
6724d2bfa4 | ||
|
|
44e569c3a2 | ||
|
|
f28f39792d | ||
|
|
f074ffc31b | ||
|
|
c6f2c2fd7e | ||
|
|
0360f54ae8 | ||
|
|
01c46f8b56 | ||
|
|
32cdaa540f | ||
|
|
461d1fdb76 | ||
|
|
fea98f82c5 | ||
|
|
9ac4d81c8b | ||
|
|
b614ea6596 | ||
|
|
f01cc079b9 | ||
|
|
b7c17727b0 | ||
|
|
9ddcc91a91 | ||
|
|
e3fd4a0ea7 | ||
|
|
a1c353a4b3 | ||
|
|
3936589755 | ||
|
|
0b9ab1438d | ||
|
|
bac652bb1d | ||
|
|
340022d4b0 | ||
|
|
053911b629 | ||
|
|
1c9dfa871b | ||
|
|
87de91dd65 | ||
|
|
c49251e95d | ||
|
|
75ff3f3815 | ||
|
|
3d4f3e423c | ||
|
|
71a551a622 | ||
|
|
5d6f3e6f92 | ||
|
|
0485ff20e8 | ||
|
|
7f8c1c1f07 | ||
|
|
b76b7f6bf5 | ||
|
|
4bd260c60d | ||
|
|
b56eb0b9cd | ||
|
|
39799adc47 | ||
|
|
079ace63ec | ||
|
|
41f4fee085 | ||
|
|
ff250dd800 | ||
|
|
2bf8788c30 | ||
|
|
5e99dded4e | ||
|
|
ecb5d3c485 | ||
|
|
f8a5b0bc43 | ||
|
|
096272f49e | ||
|
|
c32f06d62f | ||
|
|
878f378e9f | ||
|
|
0258a6f877 | ||
|
|
fe96678692 | ||
|
|
ddb0f71741 | ||
|
|
2344366c9b | ||
|
|
7e80266ae9 | ||
|
|
5d25739767 | ||
|
|
1ef748fb20 | ||
|
|
f843cb475b | ||
|
|
c832953ff7 | ||
|
|
53b838d6c5 | ||
|
|
c85e5e58d0 | ||
|
|
facb4155d4 | ||
|
|
ed16374ece | ||
|
|
a0492ce325 | ||
|
|
f1797f4323 | ||
|
|
1b9121e5b8 | ||
|
|
a5d64b586d | ||
|
|
58342740a5 | ||
|
|
3a722a36c8 | ||
|
|
02db4b0d06 | ||
|
|
d2f6c0f65f | ||
|
|
c393f7650d | ||
|
|
83c426e96b | ||
|
|
17aa97248f | ||
|
|
7fe46764fb | ||
|
|
da6d868f58 | ||
|
|
15bfe36619 | ||
|
|
e6eda6a3bb | ||
|
|
f3c0f964a2 | ||
|
|
0e94d7075e | ||
|
|
3020f2e5ec | ||
|
|
c08d87b78d | ||
|
|
03b4067f31 | ||
|
|
619265b32c | ||
|
|
5c89068168 | ||
|
|
4ffc9ffc7a | ||
|
|
e6796c3859 | ||
|
|
b9e2ded6d4 | ||
|
|
91a8a87887 | ||
|
|
ad118056b8 | ||
|
|
7157257c3f | ||
|
|
d3adcbf64b | ||
|
|
1f86722977 | ||
|
|
03a0f236a4 | ||
|
|
c0f600c887 | ||
|
|
11af199aff | ||
|
|
3967520e71 | ||
|
|
d56b500568 | ||
|
|
049297fa66 | ||
|
|
0e673a7a42 | ||
|
|
9f24885bd2 | ||
|
|
3815f46838 | ||
|
|
e2702200e1 | ||
|
|
4b3e1b3757 | ||
|
|
b8fc9010fa | ||
|
|
973255cb0b | ||
|
|
75f1b5ccde | ||
|
|
13c033c745 | ||
|
|
979e1f1bd6 | ||
|
|
f011787a83 | ||
|
|
9163951f3a | ||
|
|
725639118a | ||
|
|
7b88724711 | ||
|
|
88a6331abf | ||
|
|
64853f8509 | ||
|
|
e953af85cd | ||
|
|
d24b83132b | ||
|
|
cca4ac56fa | ||
|
|
292cd489e9 | ||
|
|
4ce9d13dbe | ||
|
|
39a5c9a49c | ||
|
|
0490ee620a | ||
|
|
ee3a533e5c | ||
|
|
89888bef56 | ||
|
|
2acec386fc | ||
|
|
e2fb86e5df | ||
|
|
c8ddb86c22 | ||
|
|
24a4c98d42 | ||
|
|
836a868abc | ||
|
|
4d466d5c80 | ||
|
|
fee23df1a5 | ||
|
|
9fd12605ac | ||
|
|
228caf0f3c | ||
|
|
d01dd2e1c8 | ||
|
|
2bea4dfa96 | ||
|
|
0a15cff6a0 | ||
|
|
636a6621cc | ||
|
|
0c069e5b3f | ||
|
|
c48e4622e8 | ||
|
|
b27f6f8915 | ||
|
|
e83235a0cc | ||
|
|
ac0f60eb1a | ||
|
|
b051e2c161 | ||
|
|
60c93e0c66 | ||
|
|
ddccc0d657 | ||
|
|
3030c79e8c | ||
|
|
addad3c63e | ||
|
|
c43ee5db11 | ||
|
|
517fcc1f23 | ||
|
|
d769618591 | ||
|
|
dc56fcff12 | ||
|
|
25c640ec0c | ||
|
|
97f5615661 | ||
|
|
27398428f6 | ||
|
|
baa566b0c6 | ||
|
|
f7836c4bd8 | ||
|
|
aa629e2809 | ||
|
|
350758f81c | ||
|
|
d01293861b | ||
|
|
3d19746a5d | ||
|
|
1c937dad72 | ||
|
|
f93196e306 | ||
|
|
9b3a3d8f12 | ||
|
|
5fa9336dab | ||
|
|
0c48ecf359 | ||
|
|
8d5cf7b134 | ||
|
|
cc8c7ed209 | ||
|
|
3a92fa517b | ||
|
|
8deea2936d | ||
|
|
e1061ba7e3 | ||
|
|
b50dc3bf57 | ||
|
|
386c0d8289 | ||
|
|
6a0837451e | ||
|
|
18f836b280 | ||
|
|
e784938654 | ||
|
|
f1a8eae04d | ||
|
|
2468cfd8bb | ||
|
|
bb62e796eb | ||
|
|
c9a9f63d1b | ||
|
|
03a2e70054 | ||
|
|
9d8b1c5fd9 | ||
|
|
22baa5378f | ||
|
|
e1338a1804 | ||
|
|
d1af7a41ad | ||
|
|
49dfa0adaf | ||
|
|
93c250b9b6 | ||
|
|
3b06cb4523 | ||
|
|
d364aa0a3c | ||
|
|
cca9d6e22d | ||
|
|
c6b50f88da | ||
|
|
7cb98351da | ||
|
|
617cd7b705 | ||
|
|
6063a66414 | ||
|
|
4d9ce586d3 | ||
|
|
9ca0cd7749 | ||
|
|
bbdeed3cf4 | ||
|
|
01362681f2 | ||
|
|
e4b0467f9f | ||
|
|
0f90a1b50f | ||
|
|
055f3f5632 | ||
|
|
57160cd6fa | ||
|
|
3f0571b62b | ||
|
|
3fb02f43f6 | ||
|
|
3b99532e02 | ||
|
|
c61b29b9ce | ||
|
|
b92d7fd43e | ||
|
|
65e5864084 | ||
|
|
1a870b3ea7 | ||
|
|
85994e3ef0 | ||
|
|
ca5a2dba72 | ||
|
|
7276dca933 | ||
|
|
46996f6519 | ||
|
|
301375834e | ||
|
|
3492e33fd5 | ||
|
|
626b0a0437 | ||
|
|
5c918c5b2d | ||
|
|
78b8705400 | ||
|
|
c5f048e912 | ||
|
|
7424f789bf | ||
|
|
c497a32372 | ||
|
|
f98431c744 | ||
|
|
a50477ec85 | ||
|
|
ac30b004ef | ||
|
|
e86ab37aaf | ||
|
|
27797a92d0 | ||
|
|
4924ee2901 | ||
|
|
bba5b36d33 | ||
|
|
c5b40eb555 | ||
|
|
2cb8d4c96e | ||
|
|
64919e0d69 | ||
|
|
68d52c60f3 | ||
|
|
d1168afa76 | ||
|
|
9a150c3368 | ||
|
|
4c74c7a116 | ||
|
|
41a8eb4eeb | ||
|
|
1f288b4072 | ||
|
|
c24966c591 | ||
|
|
5522584992 | ||
|
|
1124f71cf3 | ||
|
|
1b62cd8508 | ||
|
|
d9a031fcad | ||
|
|
073694bf15 | ||
|
|
9d99156ca3 | ||
|
|
406995f722 | ||
|
|
1b1518aa6a | ||
|
|
5058269143 | ||
|
|
fd9cb26619 | ||
|
|
64e16e9a46 | ||
|
|
68f928b5e0 | ||
|
|
8bac1a9382 | ||
|
|
bb987ffe66 | ||
|
|
4d8c1801c2 | ||
|
|
bf8187124d | ||
|
|
089d5a9415 | ||
|
|
81773f7f36 | ||
|
|
e926c03b3d | ||
|
|
f106e780ba | ||
|
|
d011040f43 | ||
|
|
608545d282 | ||
|
|
30b4d8c8b2 | ||
|
|
f4d95f33b8 | ||
|
|
9dcff21da9 | ||
|
|
addcb52c56 | ||
|
|
514fb2e451 | ||
|
|
3aa646c1d0 | ||
|
|
92ab3a9a6a | ||
|
|
5223c009fe | ||
|
|
7050bb880e | ||
|
|
078e8c8969 | ||
|
|
ffc713f72b | ||
|
|
493f8c3242 | ||
|
|
e4d411b841 | ||
|
|
6bab4c2faa | ||
|
|
f32d26240d | ||
|
|
9e82f8c394 | ||
|
|
c5814db173 | ||
|
|
498fec2c7c | ||
|
|
b80d5906c2 | ||
|
|
dd97a83534 | ||
|
|
e4624fbc68 | ||
|
|
42e80108f5 | ||
|
|
a34273755b | ||
|
|
d581334a41 | ||
|
|
14584fda36 | ||
|
|
b85ae6bc96 | ||
|
|
b6830bcdae | ||
|
|
ac30e7fe9c | ||
|
|
1f101ee3e5 | ||
|
|
3170b6efc9 | ||
|
|
7e2851e505 | ||
|
|
947016d010 | ||
|
|
3b2c23dfb5 | ||
|
|
8a5f110c14 | ||
|
|
e637b702ff | ||
|
|
98ed6d3a66 | ||
|
|
af839d20ac | ||
|
|
9d5513fda0 | ||
|
|
8b52b93e85 | ||
|
|
e777b73349 | ||
|
|
1815877061 | ||
|
|
e6181e834a | ||
|
|
f66ab63d64 | ||
|
|
6b4d762120 | ||
|
|
95b3e98c36 | ||
|
|
d1115f18b9 | ||
|
|
3ee682208c | ||
|
|
5e7f4ee88a | ||
|
|
5c5e7264ec | ||
|
|
7e73058943 | ||
|
|
f18c947a86 | ||
|
|
aa809e420e | ||
|
|
11bbf71aa5 | ||
|
|
0f53a736c1 | ||
|
|
a687f950ba | ||
|
|
017d2332ea | ||
|
|
7d2449f8b0 | ||
|
|
f2d802e707 | ||
|
|
8768b69a2d | ||
|
|
79e8dbe45f | ||
|
|
e1085180cf | ||
|
|
7ef2414357 | ||
|
|
423372d6e7 | ||
|
|
af99e0697e | ||
|
|
17df2d7bdf | ||
|
|
d05846eae5 | ||
|
|
58a1581b96 | ||
|
|
e9d4bff7d0 | ||
|
|
916d1d8283 | ||
|
|
9b205f94a4 | ||
|
|
564d8c8c0d | ||
|
|
79c4d3da3d | ||
|
|
a14c510afb | ||
|
|
6ab477f375 | ||
|
|
a9a6d72d8c | ||
|
|
1c3671699c | ||
|
|
aa7c14a463 | ||
|
|
0315122cf0 | ||
|
|
b19d239a60 | ||
|
|
4148a9201f | ||
|
|
d01c68f2a3 | ||
|
|
05676caf70 | ||
|
|
f5599656b4 | ||
|
|
d4eac58f2d | ||
|
|
a30ec2e7db | ||
|
|
063d2047dd | ||
|
|
e436d69e2b | ||
|
|
512b311137 | ||
|
|
01e4721da7 | ||
|
|
fa075e41f4 | ||
|
|
f957b17d18 | ||
|
|
c176244327 | ||
|
|
e813b322cf | ||
|
|
aa653e3b5a | ||
|
|
a210e61df1 | ||
|
|
3315d00651 | ||
|
|
e79e7b90dc | ||
|
|
363efe54f4 | ||
|
|
8b44d7b12a | ||
|
|
a47de06088 | ||
|
|
f243b4ca9c | ||
|
|
f77cf159ba | ||
|
|
7e22eaa36c | ||
|
|
907137a13d | ||
|
|
8074fba18d | ||
|
|
8a39f579d8 | ||
|
|
19a56dd538 | ||
|
|
1ea3826333 | ||
|
|
ed01322763 | ||
|
|
3e3f8637d6 | ||
|
|
4ea260098f | ||
|
|
220c1797fc | ||
|
|
f62aad3d59 | ||
|
|
8803ae1845 | ||
|
|
cc825dd1f4 | ||
|
|
5c6b9c610d | ||
|
|
de69a62004 | ||
|
|
38d58764db | ||
|
|
04cb197ed6 | ||
|
|
da196707cf | ||
|
|
9660f6f10e | ||
|
|
a5df8f4e3c | ||
|
|
c6cec0588c | ||
|
|
2f71515cb0 | ||
|
|
1da47f2ae6 | ||
|
|
9dbcb1aeea | ||
|
|
66090758df | ||
|
|
6915c5077a | ||
|
|
8ec8bc0b85 | ||
|
|
0e138e4be1 | ||
|
|
a85749dcbe | ||
|
|
5fe532a5ce | ||
|
|
b1187fc9a5 | ||
|
|
3691451d00 | ||
|
|
53fbd2f245 | ||
|
|
ac3f92d36a | ||
|
|
46ca15cb79 | ||
|
|
83534798b2 | ||
|
|
279cba607f | ||
|
|
3290edfad9 | ||
|
|
e7e1f5901e | ||
|
|
602b455507 | ||
|
|
a43c210617 | ||
|
|
96ba53d916 | ||
|
|
7db8b3b532 | ||
|
|
35f32d08bc | ||
|
|
564a3e1553 | ||
|
|
577a8cd3ee | ||
|
|
536f8d58d4 | ||
|
|
b48ab482f8 | ||
|
|
5e8dc56f8a | ||
|
|
57119c1b30 | ||
|
|
125bb7b03b | ||
|
|
5993904acf | ||
|
|
2c5a9eb597 | ||
|
|
5904142777 | ||
|
|
b10d735176 | ||
|
|
229d89ccfb | ||
|
|
fd7c3c5bb0 | ||
|
|
b6eaf7923e | ||
|
|
9420973b62 | ||
|
|
1576227f16 | ||
|
|
fdd8fab9cf | ||
|
|
10601850d9 | ||
|
|
0f3a423de1 | ||
|
|
9aef01551d | ||
|
|
8930bfc5f4 | ||
|
|
386500aa37 | ||
|
|
4820ae9aef | ||
|
|
1d79aa67cf | ||
|
|
3abafee696 | ||
|
|
b675151f25 | ||
|
|
a36fa73071 | ||
|
|
2d196ed2fe | ||
|
|
46174a2d33 | ||
|
|
a363cdfca1 | ||
|
|
8df68b05e9 | ||
|
|
4f1e96b9e3 | ||
|
|
ad54d524f7 | ||
|
|
5499bc9bc8 | ||
|
|
8aaa0a6f4e | ||
|
|
9e189947d1 | ||
|
|
ae86292159 | ||
|
|
bd7cc4234d | ||
|
|
6a1682aa95 | ||
|
|
852c943769 | ||
|
|
9f77ed1b98 | ||
|
|
818b4e0354 | ||
|
|
907702c204 | ||
|
|
d7bd3da35e | ||
|
|
5cb59707f3 | ||
|
|
8456d13349 | ||
|
|
b63dc4e325 | ||
|
|
2de586f586 | ||
|
|
6b546a2c8b | ||
|
|
abe5ddc883 | ||
|
|
a38a37b3b3 | ||
|
|
a4611232b7 | ||
|
|
0d90b3a25c | ||
|
|
e225b0b995 | ||
|
|
9557f49f2f | ||
|
|
9ac528715c | ||
|
|
7a728a38eb | ||
|
|
d5bde7babc | ||
|
|
0b193b8553 | ||
|
|
cb31998605 | ||
|
|
e9c9483171 | ||
|
|
e61055253c | ||
|
|
0476f9fe70 | ||
|
|
ae0f28530c | ||
|
|
8f12fb028d | ||
|
|
1eba888af6 | ||
|
|
51fb766bea | ||
|
|
81f603d09f | ||
|
|
5770e06c48 | ||
|
|
dfdb6fee22 | ||
|
|
70845c76fb | ||
|
|
6761b5e7c6 | ||
|
|
9c04365f54 | ||
|
|
8b1dee3ec8 | ||
|
|
4aa481282b | ||
|
|
ad122361ea | ||
|
|
c9b0df16ee | ||
|
|
4094813f8d | ||
|
|
64e2a9a0a7 | ||
|
|
f0538efb99 | ||
|
|
f3c9103e04 | ||
|
|
c725d97368 | ||
|
|
9b623b8a78 | ||
|
|
0877741b03 | ||
|
|
a4b732c30b | ||
|
|
f27e1ba302 | ||
|
|
b30bce3b2f | ||
|
|
a0c69749e6 | ||
|
|
8985a8538b | ||
|
|
26d822f64f | ||
|
|
597556cb77 | ||
|
|
e158299fb4 | ||
|
|
fd4e46bce2 | ||
|
|
a90509d82e | ||
|
|
70c637bf90 | ||
|
|
3e3a7c4250 | ||
|
|
5f5ceaf025 | ||
|
|
bd71a504b8 | ||
|
|
c423d51a83 | ||
|
|
b94cd6754e | ||
|
|
17c4319e2d | ||
|
|
3ae61c0338 | ||
|
|
cbd65ba767 | ||
|
|
ed4001e324 | ||
|
|
91a7370a65 | ||
|
|
f6828de3f2 | ||
|
|
39ff9c9dcf | ||
|
|
65099dc192 | ||
|
|
d02744282b | ||
|
|
dfb01f9a63 | ||
|
|
096f75a432 | ||
|
|
dd6e4ac55f | ||
|
|
1bdceea2d4 | ||
|
|
168a0f4f67 | ||
|
|
64a76856bd | ||
|
|
1b87844928 | ||
|
|
6b7f7555fc | ||
|
|
4e739dc211 | ||
|
|
8a8dbf2f16 | ||
|
|
0f536dd97d | ||
|
|
a4c67e1974 | ||
|
|
14f6194211 | ||
|
|
5b91dbb73b | ||
|
|
308452b783 | ||
|
|
d423021a48 | ||
|
|
13fe38eb27 | ||
|
|
3952560da8 | ||
|
|
9ab7365b56 | ||
|
|
db5f6cd1d8 | ||
|
|
624faa1438 | ||
|
|
70c58b5fc2 | ||
|
|
1a7c027386 | ||
|
|
c37f792afa | ||
|
|
9653a9176c | ||
|
|
3ce0d9221b | ||
|
|
e0e28ecb0b | ||
|
|
723f912c16 | ||
|
|
35da6b989d | ||
|
|
3609ea69e4 | ||
|
|
9ad116a6e2 | ||
|
|
7cbafc0540 | ||
|
|
bdcf31035f | ||
|
|
8c9aca239a | ||
|
|
2a92a842ce | ||
|
|
7cf1402bde | ||
|
|
49b111e2dd | ||
|
|
d890c99b53 | ||
|
|
d828844a6f | ||
|
|
2ef5490a36 | ||
|
|
40a60e0297 | ||
|
|
edec3bf3b0 | ||
|
|
8152152dd6 | ||
|
|
28076928ac | ||
|
|
63701f59cf | ||
|
|
46031407b5 | ||
|
|
9eca197409 | ||
|
|
afadc787d7 | ||
|
|
1934cb61ef | ||
|
|
056717923f | ||
|
|
15d90d9bd5 | ||
|
|
abcdd0ad5b | ||
|
|
a102c704f5 | ||
|
|
b3ade5832b | ||
|
|
67b24b0b88 | ||
|
|
763f9beb7e | ||
|
|
52a34921ef | ||
|
|
cf0697936a | ||
|
|
afb51bd5d6 | ||
|
|
9271e80914 | ||
|
|
549bb88975 | ||
|
|
238f69accc | ||
|
|
d0663bae31 | ||
|
|
0e6eb7c27a | ||
|
|
2681f6f640 | ||
|
|
bae14c8f13 | ||
|
|
aa0da07af0 | ||
|
|
104573f7d4 | ||
|
|
bef08129bc | ||
|
|
303433001f | ||
|
|
bde7f00cae | ||
|
|
2ec1d96c91 | ||
|
|
fa0e68cefd | ||
|
|
992affefef | ||
|
|
fcc92caa30 | ||
|
|
2174958362 | ||
|
|
7eee9e9470 | ||
|
|
03f03af535 | ||
|
|
d61e31e182 | ||
|
|
ae12d045ea | ||
|
|
72a498ddd4 | ||
|
|
1437f757a1 | ||
|
|
63a1d4afc8 | ||
|
|
164ff2440d | ||
|
|
3cfcab63a5 | ||
|
|
907bda0d56 | ||
|
|
f697cb4609 | ||
|
|
2d74660733 | ||
|
|
fbe83854ca | ||
|
|
90ab022856 | ||
|
|
97dc3602fc | ||
|
|
6adf222599 | ||
|
|
4bb79c57ac | ||
|
|
74564fe8d0 | ||
|
|
dc2dd5b9d8 | ||
|
|
527ba98105 | ||
|
|
fa4ce0eee8 | ||
|
|
8bd4960d05 | ||
|
|
70047a5c57 | ||
|
|
24e86bb21b | ||
|
|
60f3d87309 | ||
|
|
314e42fd98 | ||
|
|
71b1617c1b | ||
|
|
cfb25c9b3f | ||
|
|
3168644152 | ||
|
|
71dc5b4dee | ||
|
|
09b13acfb2 | ||
|
|
dfdf6eb5b4 | ||
|
|
332957ffec | ||
|
|
b64770805b | ||
|
|
830168d3d4 | ||
|
|
21acf504ce | ||
|
|
4164e29416 | ||
|
|
ba852716fd | ||
|
|
d34126255d | ||
|
|
0f68c6fb5b | ||
|
|
45c4cd01c5 | ||
|
|
e0fc808980 | ||
|
|
32ee5504ed | ||
|
|
c07dc56736 | ||
|
|
98580cad8e | ||
|
|
527f2652af | ||
|
|
3f42e3292a | ||
|
|
10aedc329f | ||
|
|
faf3bf2503 | ||
|
|
ac5a7a26ea | ||
|
|
59032140b5 | ||
|
|
c203c57c18 | ||
|
|
5f7dbf454a | ||
|
|
d04fef6a07 | ||
|
|
ed6ff49431 | ||
|
|
d6bb6e7390 | ||
|
|
10df23efb7 | ||
|
|
0b2279d031 | ||
|
|
ae05d9830f | ||
|
|
717c3494e8 | ||
|
|
1f27bef71b | ||
|
|
d8064c00e8 | ||
|
|
36c29084bb | ||
|
|
904867a139 | ||
|
|
7838075990 | ||
|
|
d6bd71db7f | ||
|
|
dd46229487 | ||
|
|
af0bbf5b13 | ||
|
|
fa1019e8fe | ||
|
|
c375c753d6 | ||
|
|
a6730f88f7 | ||
|
|
4039999be5 | ||
|
|
76d28eaa9e | ||
|
|
0e1d8d5601 | ||
|
|
f465b7b486 | ||
|
|
44018c2f69 | ||
|
|
b2b74c83a6 | ||
|
|
080f7132c0 | ||
|
|
ea0e1feee7 | ||
|
|
549f106879 | ||
|
|
7123ac3f77 | ||
|
|
33c4ce0720 | ||
|
|
771c59290a | ||
|
|
04d8bdf929 | ||
|
|
b16958575f | ||
|
|
d47182d9d1 | ||
|
|
3a9ce3cfa6 | ||
|
|
93dd31fc0f | ||
|
|
dc6adefd87 | ||
|
|
069ed7c6ef | ||
|
|
193548edce | ||
|
|
25b655faeb | ||
|
|
f99f1fc68e | ||
|
|
d8081e85ec | ||
|
|
653b195b1e | ||
|
|
147b4cf3e0 | ||
|
|
512933fa44 | ||
|
|
e9fea353c5 | ||
|
|
7342afaf19 | ||
|
|
86c320ab5a | ||
|
|
acea6a6669 | ||
|
|
35537ad3d1 | ||
|
|
b8e25e8678 | ||
|
|
c55b8ce932 | ||
|
|
4e34ae0587 | ||
|
|
3add2376cd | ||
|
|
494cc3c5b0 | ||
|
|
0f134bf744 | ||
|
|
775902c1f2 | ||
|
|
a329db062e | ||
|
|
acfbe6b3b3 | ||
|
|
8ee3cea7cb | ||
|
|
8a6d9abb41 | ||
|
|
2a1063eff5 | ||
|
|
8c35fefb3b | ||
|
|
7301c7618f | ||
|
|
f234fbe83f | ||
|
|
7073665a10 | ||
|
|
9033fa5eee | ||
|
|
cd4ffd3dd4 | ||
|
|
92d0617bce | ||
|
|
a210999255 | ||
|
|
9fdee65cf5 | ||
|
|
2a45620c85 | ||
|
|
3df7e151f7 | ||
|
|
4e188eeb80 | ||
|
|
cde000d478 | ||
|
|
b6077b02e4 | ||
|
|
e98d1086f5 | ||
|
|
4f3fdf1b5f | ||
|
|
167ee72d4e | ||
|
|
ee65f4f014 | ||
|
|
89f6036e98 | ||
|
|
528318b700 | ||
|
|
40c7977f9b | ||
|
|
b1463df0a1 | ||
|
|
d921f80322 | ||
|
|
26c3ab367e | ||
|
|
5e87678fea | ||
|
|
69622930c7 | ||
|
|
828be63f2c | ||
|
|
e7a760e6b3 | ||
|
|
de387069da | ||
|
|
c0bdcee646 | ||
|
|
bfe2326a24 | ||
|
|
70648e75e6 | ||
|
|
c1470870bb | ||
|
|
87dc421ee8 | ||
|
|
837bd888e4 | ||
|
|
1343aa3d33 | ||
|
|
aa575119e6 | ||
|
|
821dd65fb3 | ||
|
|
6247eafcc5 | ||
|
|
817866c9cf | ||
|
|
b9d1873301 | ||
|
|
aad73667af | ||
|
|
6ada77cf5a | ||
|
|
8b5495ebf8 | ||
|
|
cc6505df14 | ||
|
|
fbf8ae39f8 | ||
|
|
166fdf09f3 | ||
|
|
db1da9f98d | ||
|
|
b5cabb6e9d | ||
|
|
8962bb173e | ||
|
|
232c07bf1f | ||
|
|
3fef37cda8 | ||
|
|
7916cf863b | ||
|
|
5c5ef4cef7 | ||
|
|
4d14eb8b82 | ||
|
|
535ea9928a | ||
|
|
e055967974 | ||
|
|
d8c3a5bee8 | ||
|
|
952a05a7c8 | ||
|
|
61e4bfe305 | ||
|
|
d80b191b1c | ||
|
|
e1dd5ee2de | ||
|
|
ec2da5adef | ||
|
|
b3fc2cd887 | ||
|
|
bb2c4707c4 | ||
|
|
4332e24740 | ||
|
|
a4c51b5a05 | ||
|
|
53dc1d8197 | ||
|
|
2dc8db8aa4 | ||
|
|
038b4fc8af | ||
|
|
89e7e107fc | ||
|
|
c44836c4d7 | ||
|
|
bec4e0a1ce | ||
|
|
4333d82b9d | ||
|
|
fbce30b09f | ||
|
|
953343cced | ||
|
|
123f27a3c5 | ||
|
|
ba87b9993d | ||
|
|
b908ed318d | ||
|
|
4ca82a4df9 | ||
|
|
7af50ede94 | ||
|
|
a9f49a7574 | ||
|
|
4d730a759a | ||
|
|
6e9d814095 | ||
|
|
7bdd2118a2 | ||
|
|
aac93a1fd6 | ||
|
|
615fa11af8 | ||
|
|
4f7e1eeafd | ||
|
|
372ef5e2d8 | ||
|
|
29c2693ea0 | ||
|
|
dc1df22a2b | ||
|
|
32cdc66cf1 | ||
|
|
c4e005efec | ||
|
|
9cd2106303 | ||
|
|
4365fb890f | ||
|
|
ad1ff53034 | ||
|
|
0d07b3a6a1 | ||
|
|
b8a0b3f925 | ||
|
|
e169993b7a | ||
|
|
cf820c69c5 | ||
|
|
c4c7fc4ab3 | ||
|
|
d93db3b486 | ||
|
|
48327cc5c4 | ||
|
|
3eca20c015 | ||
|
|
8ea3f31601 | ||
|
|
91c2b8e11c | ||
|
|
248742df1c | ||
|
|
9e86bea8e9 | ||
|
|
3d854ee516 | ||
|
|
c9c31f71b8 | ||
|
|
c9d814592e | ||
|
|
3bb4b0504e | ||
|
|
e4d724eb3f | ||
|
|
37eff915d6 | ||
|
|
c54d1daaaa | ||
|
|
7965f6045e | ||
|
|
894e1a0700 | ||
|
|
b80e6365d0 | ||
|
|
f6a204d7c9 | ||
|
|
3a6cba9021 | ||
|
|
3f28925a8d | ||
|
|
7cce88c403 | ||
|
|
90c7e84b01 | ||
|
|
a4b4708560 | ||
|
|
94afa0f9cf | ||
|
|
3e3a66e721 | ||
|
|
cbf6f9e695 | ||
|
|
2aad91f3c9 | ||
|
|
485b85ee76 | ||
|
|
2734ce3e4c | ||
|
|
0e54a09bcb | ||
|
|
8e397915c9 | ||
|
|
b7dd1f9542 | ||
|
|
20a2eaaf95 | ||
|
|
a4079e879e | ||
|
|
c419206ce1 | ||
|
|
3fd7073808 | ||
|
|
648c2d1cc2 | ||
|
|
c21e3d6300 | ||
|
|
2ad6c526b8 | ||
|
|
63553b41ed | ||
|
|
abd227594c | ||
|
|
8d0359a6d8 | ||
|
|
5466ae59a7 | ||
|
|
19d13743a6 | ||
|
|
02d063fb9f | ||
|
|
ae927950a8 | ||
|
|
18ca35faaa | ||
|
|
73ba7a8921 | ||
|
|
29b0f14d5a | ||
|
|
af876095e2 | ||
|
|
c06f630bcc | ||
|
|
92d5e64a82 | ||
|
|
4aeebfc571 | ||
|
|
d76b00c211 | ||
|
|
8c60495878 | ||
|
|
1b8b61b928 | ||
|
|
dbe438564e | ||
|
|
8956f3ebe2 | ||
|
|
afc91edcb2 | ||
|
|
554a8f910b | ||
|
|
4b25acf58f | ||
|
|
588b37c032 | ||
|
|
568541aa31 | ||
|
|
c1b99f45cb | ||
|
|
0dd759c44f | ||
|
|
6fbd64db72 | ||
|
|
2706149c65 | ||
|
|
c727a70572 | ||
|
|
e3e053ab99 | ||
|
|
a098c7eee3 | ||
|
|
11288d11d4 | ||
|
|
6efbe3009f | ||
|
|
bcba200790 | ||
|
|
f0f6d9bdf9 | ||
|
|
b15f510154 | ||
|
|
489f4a23bf | ||
|
|
258c695ead | ||
|
|
c1f78dbd0f | ||
|
|
fadb295d4d | ||
|
|
2289e9031e | ||
|
|
fb8ee9f7ff | ||
|
|
366c93a008 | ||
|
|
9992f7d8c0 | ||
|
|
23818dc098 | ||
|
|
95600073bc | ||
|
|
f9accd38e0 | ||
|
|
d8279dc710 | ||
|
|
ff3e845b04 | ||
|
|
40d5bf6c35 | ||
|
|
f42074b6c1 | ||
|
|
5b791cae4a | ||
|
|
e83e6cedbe | ||
|
|
f4ae0075e8 | ||
|
|
de138b8ba6 | ||
|
|
71eb744b1c | ||
|
|
0a299d5959 | ||
|
|
83cf1a6b67 | ||
|
|
781367bdc3 | ||
|
|
9847809a7a | ||
|
|
f6d701624c | ||
|
|
a23a004434 | ||
|
|
3d10c574e7 | ||
|
|
9e48e50428 | ||
|
|
9fa3883630 | ||
|
|
674be9a09a | ||
|
|
b28020a9e4 | ||
|
|
64a57d9dc2 | ||
|
|
1f9e25e76a | ||
|
|
da1c8d77ea | ||
|
|
cac89df97b | ||
|
|
f0d6ead877 | ||
|
|
3f3cd4fbe4 | ||
|
|
306c479d3a | ||
|
|
3f973e1fbf | ||
|
|
7c6f39382b | ||
|
|
59da429cbd | ||
|
|
7be09836fc | ||
|
|
f1f2c4c3f4 | ||
|
|
12690d3ffc | ||
|
|
aa200f8723 | ||
|
|
7a84d7b2da | ||
|
|
41424907b1 | ||
|
|
d2ed0a06bf | ||
|
|
0087dca286 | ||
|
|
f8079d067d | ||
|
|
443be391f2 | ||
|
|
a060908d6c | ||
|
|
3bbf6c601d | ||
|
|
7de10f4c8e | ||
|
|
d0677caf2c | ||
|
|
69ba3cb0d9 | ||
|
|
127c71a22a | ||
|
|
85816898f9 | ||
|
|
2cb5b68ad9 | ||
|
|
11f082e417 | ||
|
|
e53f99faa0 | ||
|
|
eaa1fe67f3 | ||
|
|
c3e0fcfc52 | ||
|
|
f336f8a811 | ||
|
|
dde7921057 | ||
|
|
fd1449de20 | ||
|
|
4ae2dcebf5 | ||
|
|
8acecf3aee | ||
|
|
8835ea3704 | ||
|
|
bf68d4499e | ||
|
|
623c92792a | ||
|
|
3580bed041 | ||
|
|
e91c09b8af | ||
|
|
d5ec3c3444 | ||
|
|
5b283fff22 | ||
|
|
958799221f | ||
|
|
e7fa17740a | ||
|
|
03babe7d81 | ||
|
|
aad14174e4 | ||
|
|
783947a2aa | ||
|
|
7fef16950f | ||
|
|
d36e7f1762 | ||
|
|
9695db0ee4 | ||
|
|
d354f5009c | ||
|
|
0a4fad2d46 | ||
|
|
fade6abfe9 | ||
|
|
aafd15109d | ||
|
|
634518a412 | ||
|
|
0d5ca05ab9 | ||
|
|
d241de86c4 | ||
|
|
5754f0c357 | ||
|
|
f51156705d | ||
|
|
36e850fe89 | ||
|
|
3e0c11a758 | ||
|
|
1bfee1d12e | ||
|
|
d14d4cad4a | ||
|
|
3f3960dbfb | ||
|
|
4eeac70af7 | ||
|
|
fcf5512364 | ||
|
|
bdcc769e6f | ||
|
|
7f1a6a70e3 | ||
|
|
314a095c74 | ||
|
|
c2802bc3ac | ||
|
|
b2cae6cac6 | ||
|
|
21a5bfc67f | ||
|
|
12f58e2cac | ||
|
|
1c531a3713 | ||
|
|
85a1d8965c | ||
|
|
8513028968 | ||
|
|
736fe4aa3e | ||
|
|
39d2fe1ed9 | ||
|
|
aab0dd962d | ||
|
|
7a987417bb | ||
|
|
a17750db91 | ||
|
|
a8a92c6c87 | ||
|
|
993ca51a65 | ||
|
|
602b8c6210 | ||
|
|
8c8825b777 | ||
|
|
2a335b8aa7 | ||
|
|
62d59a516f | ||
|
|
705f04a0c9 | ||
|
|
181743fd97 | ||
|
|
884871c107 | ||
|
|
00aedf9209 | ||
|
|
7bbe7e803a | ||
|
|
1c74b3ab45 | ||
|
|
2c5a1e67f9 | ||
|
|
e16e5997ef | ||
|
|
d516815c9c | ||
|
|
98361af4d5 | ||
|
|
6430acadde | ||
|
|
c21a9668a5 | ||
|
|
f786aa3caa | ||
|
|
2694ef45a3 | ||
|
|
0f828ea441 | ||
|
|
af261e5dd4 | ||
|
|
9edb193def | ||
|
|
1ccbcb967e | ||
|
|
ac9f154bcc | ||
|
|
131a5212ce | ||
|
|
f7145544f9 | ||
|
|
8e1f86a866 | ||
|
|
be88b072e9 | ||
|
|
801ba87c68 | ||
|
|
7fc9033b2e | ||
|
|
e4e35f357b | ||
|
|
3f993280e4 | ||
|
|
0931ed501b | ||
|
|
427a165597 | ||
|
|
0bfd5090be | ||
|
|
2e83844f35 | ||
|
|
06cc9a85f7 | ||
|
|
7c0a17962d | ||
|
|
96df4f10b9 | ||
|
|
77d6ccf12b | ||
|
|
1c90e02243 | ||
|
|
092a2c3516 | ||
|
|
6d3a9b8689 | ||
|
|
000b77a17d | ||
|
|
88620c6b39 | ||
|
|
2698d7c9fd | ||
|
|
fa89d305e3 | ||
|
|
9940ed9c77 | ||
|
|
78fd7f6aa8 | ||
|
|
a7670c31ca | ||
|
|
6e51bae2e0 | ||
|
|
f4b956b47c | ||
|
|
68059d7c23 | ||
|
|
1b05832f9a | ||
|
|
b5b3d18773 | ||
|
|
9f7ae6bb2e | ||
|
|
d06ce7b75c | ||
|
|
b6d16a35b1 | ||
|
|
51add248c8 | ||
|
|
cb0dbffccc | ||
|
|
8d811a4d58 | ||
|
|
0589ff5b12 | ||
|
|
2769a1fa25 | ||
|
|
0047d9f5e0 | ||
|
|
fb124ab6e2 | ||
|
|
e9cdaa2ada | ||
|
|
b81d6ad8a4 | ||
|
|
360eeb9ff1 | ||
|
|
54a4eb60a3 | ||
|
|
efdd99623c | ||
|
|
b02dc4dc0d | ||
|
|
55f2a3643b | ||
|
|
829c6d4f78 | ||
|
|
8dc9ec3491 | ||
|
|
ff24648510 | ||
|
|
be78d79811 | ||
|
|
4b84e45116 | ||
|
|
d7f1bc102b | ||
|
|
5e70263e25 | ||
|
|
f11092ac2a | ||
|
|
f0d66cf817 | ||
|
|
22e7a22d1e | ||
|
|
a2e6d00128 | ||
|
|
d1bba48a83 | ||
|
|
8cf05c1b31 | ||
|
|
9da7bb203d | ||
|
|
78af3b0a00 | ||
|
|
a6f1e1bcc5 | ||
|
|
a290d17386 | ||
|
|
ab94f0d9bf | ||
|
|
5fcee696ea | ||
|
|
cb836dd49c | ||
|
|
771e62e476 | ||
|
|
ef6feedeb2 | ||
|
|
0fa1af296c | ||
|
|
8f4f4daf8b | ||
|
|
af76fbedb8 | ||
|
|
baab894759 | ||
|
|
47d9e2618b | ||
|
|
83b64e7fc1 | ||
|
|
d1a58da52f | ||
|
|
e0ca49ed9c | ||
|
|
3146124ec0 | ||
|
|
9d6f79db74 | ||
|
|
e0a7cc5e0f | ||
|
|
13dc3b61da | ||
|
|
8b66d83aa9 | ||
|
|
f889302d24 | ||
|
|
b2ce8dc7ee | ||
|
|
0ee8d2b66b | ||
|
|
780b00e1cf | ||
|
|
c0233bb9d3 | ||
|
|
94b7177174 | ||
|
|
6525707a7f | ||
|
|
510a01ef46 | ||
|
|
923c8e25fb | ||
|
|
61f429563e | ||
|
|
e7d460d932 | ||
|
|
cbf2b47476 | ||
|
|
58c6001be9 | ||
|
|
cd41f8912b | ||
|
|
a475aa7816 | ||
|
|
9be48e83a9 | ||
|
|
a85ce5f055 | ||
|
|
883701bc40 | ||
|
|
8ac942813c | ||
|
|
e6f44d6d19 | ||
|
|
e05d8fd441 | ||
|
|
be125e2708 | ||
|
|
564d0cde82 | ||
|
|
025da386a0 | ||
|
|
8a9d5a0cea | ||
|
|
8a2af87d3a | ||
|
|
072cfe19e9 | ||
|
|
2337aebe4d | ||
|
|
3d861a459d | ||
|
|
dea90c7b67 | ||
|
|
454fcf39a9 | ||
|
|
4f9bc63edf | ||
|
|
74fee4f312 | ||
|
|
52758f15da | ||
|
|
c5be3f7acb | ||
|
|
7ed2143cd6 | ||
|
|
0777b0d3c7 | ||
|
|
4aabff3728 | ||
|
|
6a7cd01ebf | ||
|
|
2af7e382b1 | ||
|
|
07d66e45b4 | ||
|
|
372d712921 | ||
|
|
d86f1fd2c3 | ||
|
|
f7534b2f4b | ||
|
|
effb3aef42 | ||
|
|
4da00b6032 | ||
|
|
21ed9a260e | ||
|
|
678fd73aef | ||
|
|
1754a3761b | ||
|
|
6c7aad11f3 | ||
|
|
881e8a6e70 | ||
|
|
050ff36bd6 | ||
|
|
38b07493a0 | ||
|
|
2358706453 | ||
|
|
43c53a7820 | ||
|
|
1b69694fe9 | ||
|
|
f6ca9cfcdc | ||
|
|
6e2e0317af | ||
|
|
322c170566 | ||
|
|
5c0559da69 | ||
|
|
af3d25a503 | ||
|
|
5c3eb22ce6 | ||
|
|
3fc505dc0f | ||
|
|
3d59346871 | ||
|
|
cee099f131 | ||
|
|
48c9c31440 | ||
|
|
d59f1ad89a | ||
|
|
0c440877de | ||
|
|
55dc9845cb | ||
|
|
b0b999dd68 | ||
|
|
2bda1a9c9b | ||
|
|
cc04abda49 | ||
|
|
ddca6948b2 | ||
|
|
40e73aafce | ||
|
|
6ec997f195 | ||
|
|
15d4ea180d | ||
|
|
b2afdda4e8 | ||
|
|
349604458b | ||
|
|
fd893baba1 | ||
|
|
18739c8b3a | ||
|
|
79b3f5a546 | ||
|
|
97c21e5667 | ||
|
|
4a45dc4041 | ||
|
|
1fba6db69f | ||
|
|
0ed6a17ed4 | ||
|
|
0db81355bc | ||
|
|
b87c6213ae | ||
|
|
fcc9114b58 | ||
|
|
ceb8c92dfc | ||
|
|
28fd535f9c | ||
|
|
5b5ef57049 | ||
|
|
ec17a5d2b7 | ||
|
|
84d957ba62 | ||
|
|
e18a0460d4 | ||
|
|
b7a409ef57 | ||
|
|
fb3bd0203d | ||
|
|
1d8c7c1fc4 | ||
|
|
b5c53041b8 | ||
|
|
40f7f37009 | ||
|
|
2081f43ac2 | ||
|
|
4766a57352 | ||
|
|
add359379e | ||
|
|
cfbd108826 | ||
|
|
aa5d671579 | ||
|
|
1ab8700d94 | ||
|
|
45fcb60e7a | ||
|
|
7f9c1cbb30 | ||
|
|
4537853e2c | ||
|
|
367e5e6e43 | ||
|
|
fcb7017b7a | ||
|
|
fdcaa955e3 | ||
|
|
c0655475ae | ||
|
|
42f816312d | ||
|
|
77abd9b69b | ||
|
|
a56ef2a942 | ||
|
|
deba039c03 | ||
|
|
aaf726dbfb | ||
|
|
9bd0724d85 | ||
|
|
6b7fa45cc3 | ||
|
|
41e159e88f | ||
|
|
0707ed7677 | ||
|
|
262f8ae5bb | ||
|
|
f481ce3dd8 | ||
|
|
af98587580 | ||
|
|
839a87bac8 | ||
|
|
778a010df8 | ||
|
|
317e2c857e | ||
|
|
92b2f57095 | ||
|
|
2d97897a25 | ||
|
|
0ced78fdfa | ||
|
|
72f6fc6923 | ||
|
|
ef1489cd4d | ||
|
|
1edf321362 | ||
|
|
280ae720d7 | ||
|
|
49e5eecce4 | ||
|
|
82c11be067 | ||
|
|
306d764ff6 | ||
|
|
4bc411332f | ||
|
|
92691ee626 | ||
|
|
4440f87722 | ||
|
|
6086768309 | ||
|
|
b8183148cf | ||
|
|
cea7fc2435 | ||
|
|
df90d03e0b | ||
|
|
d0c3b407b3 | ||
|
|
4405513ca5 | ||
|
|
7a3f885ea8 | ||
|
|
63688004dc | ||
|
|
613feca23b | ||
|
|
994502d41b | ||
|
|
c544f5cc51 | ||
|
|
05741821a5 | ||
|
|
6a61158adf | ||
|
|
c18504f369 | ||
|
|
cd45635f53 | ||
|
|
ae79c510cc | ||
|
|
2d1b3332e4 | ||
|
|
caf6db07ad | ||
|
|
1a34927314 | ||
|
|
09f807af83 | ||
|
|
619093483e | ||
|
|
506d05aede | ||
|
|
b1f33b55fd | ||
|
|
ac6d5d50b7 | ||
|
|
6efb990b60 | ||
|
|
5a5bc135e9 | ||
|
|
b98fbe0afc | ||
|
|
fbac6d21ca | ||
|
|
3345da2ea4 | ||
|
|
258d046218 | ||
|
|
1d5a015ce7 | ||
|
|
8f6405d2fa | ||
|
|
9be74fb57c | ||
|
|
e208128d68 | ||
|
|
dedbdb46c2 | ||
|
|
32984ea2f0 | ||
|
|
bb71272903 | ||
|
|
eda7126b25 | ||
|
|
ae8cd449ae | ||
|
|
c0ffb77fd8 | ||
|
|
db7ecdd274 | ||
|
|
f17f7a6913 | ||
|
|
8ea554bc19 | ||
|
|
188d20e9e5 | ||
|
|
2d44adbb76 | ||
|
|
388d1864a6 | ||
|
|
71cac7a1b2 | ||
|
|
e14bde4946 | ||
|
|
b88b2b74a6 | ||
|
|
d331501ebc | ||
|
|
3bb4046fad | ||
|
|
45fa803943 | ||
|
|
2706394bfe | ||
|
|
8cce1f1126 | ||
|
|
1f5a2c5597 | ||
|
|
cd1cad1b47 | ||
|
|
6e2dec82f1 | ||
|
|
ed66ca3cdf | ||
|
|
43be1be598 | ||
|
|
faf5c4dd58 | ||
|
|
773c17faec | ||
|
|
f63361568c | ||
|
|
39f16ff83d | ||
|
|
fae8062d39 | ||
|
|
2b75d725e6 | ||
|
|
9fab9a1ca6 | ||
|
|
a49cc69a4a | ||
|
|
3a9d90c3a1 | ||
|
|
520cbb2ab1 | ||
|
|
35695e18c7 |
1
.github/FUNDING.yml
vendored
1
.github/FUNDING.yml
vendored
|
|
@ -1 +0,0 @@
|
||||||
ko_fi: oobabooga
|
|
||||||
|
|
@ -46,7 +46,7 @@ body:
|
||||||
id: system-info
|
id: system-info
|
||||||
attributes:
|
attributes:
|
||||||
label: System Info
|
label: System Info
|
||||||
description: "Please share your system info with us: operating system, GPU brand, and GPU model. If you are using a Google Colab notebook, mention that instead."
|
description: "Please share your operating system and GPU type (NVIDIA/AMD/Intel/Apple). If you are using a Google Colab notebook, mention that instead."
|
||||||
render: shell
|
render: shell
|
||||||
placeholder:
|
placeholder:
|
||||||
validations:
|
validations:
|
||||||
|
|
|
||||||
7
.github/dependabot.yml
vendored
7
.github/dependabot.yml
vendored
|
|
@ -5,7 +5,10 @@
|
||||||
|
|
||||||
version: 2
|
version: 2
|
||||||
updates:
|
updates:
|
||||||
- package-ecosystem: "pip" # See documentation for possible values
|
- package-ecosystem: "pip"
|
||||||
directory: "/" # Location of package manifests
|
directories:
|
||||||
|
- "/requirements/full/"
|
||||||
|
- "/requirements/portable/"
|
||||||
|
target-branch: "dev"
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
|
|
|
||||||
70
.github/workflows/build-everything-tgw.yml
vendored
Normal file
70
.github/workflows/build-everything-tgw.yml
vendored
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
name: Build Everything TGW
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build_release_cuda_windows:
|
||||||
|
name: CUDA Windows
|
||||||
|
uses: ./.github/workflows/build-portable-release-cuda.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:windows-2022'
|
||||||
|
|
||||||
|
build_release_cuda_linux:
|
||||||
|
name: CUDA Linux
|
||||||
|
uses: ./.github/workflows/build-portable-release-cuda.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:ubuntu-22.04'
|
||||||
|
|
||||||
|
build_release_vulkan_windows:
|
||||||
|
name: Vulkan Windows
|
||||||
|
uses: ./.github/workflows/build-portable-release-vulkan.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:windows-2022'
|
||||||
|
|
||||||
|
build_release_vulkan_linux:
|
||||||
|
name: Vulkan Linux
|
||||||
|
uses: ./.github/workflows/build-portable-release-vulkan.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:ubuntu-22.04'
|
||||||
|
|
||||||
|
build_release_rocm_linux:
|
||||||
|
name: ROCm Linux
|
||||||
|
uses: ./.github/workflows/build-portable-release-rocm.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:ubuntu-22.04'
|
||||||
|
|
||||||
|
build_release_cpu_windows:
|
||||||
|
name: CPU Windows
|
||||||
|
uses: ./.github/workflows/build-portable-release.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:windows-2022'
|
||||||
|
|
||||||
|
build_release_cpu_linux:
|
||||||
|
name: CPU Linux
|
||||||
|
uses: ./.github/workflows/build-portable-release.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:ubuntu-22.04'
|
||||||
|
|
||||||
|
build_release_macos:
|
||||||
|
name: macOS
|
||||||
|
uses: ./.github/workflows/build-portable-release.yml
|
||||||
|
with:
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
config: 'os:macos-15-intel,macos-14'
|
||||||
175
.github/workflows/build-portable-release-cuda.yml
vendored
Normal file
175
.github/workflows/build-portable-release-cuda.yml
vendored
Normal file
|
|
@ -0,0 +1,175 @@
|
||||||
|
name: Build CUDA
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Override configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
define_matrix:
|
||||||
|
name: Define Build Matrix
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
CONFIGIN: ${{ inputs.config }}
|
||||||
|
EXCLUDEIN: ${{ inputs.exclude }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Define Job Output
|
||||||
|
id: set-matrix
|
||||||
|
run: |
|
||||||
|
$matrix = @{
|
||||||
|
'os' = @('ubuntu-22.04', 'windows-2022')
|
||||||
|
'pyver' = @("3.13")
|
||||||
|
'cuda' = @("12.4", "13.1")
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($env:CONFIGIN -ne 'Default') {$env:CONFIGIN.split(';').foreach({$matrix[$_.split(':')[0]] = $_.split(':')[1].split(',')})}
|
||||||
|
|
||||||
|
if ($env:EXCLUDEIN -ne 'None') {
|
||||||
|
$exclusions = @()
|
||||||
|
$exclusions += $env:EXCLUDEIN.split(';').replace(':','=').replace(',',"`n") | ConvertFrom-StringData
|
||||||
|
$matrix['exclude'] = $exclusions
|
||||||
|
}
|
||||||
|
|
||||||
|
$matrixOut = ConvertTo-Json $matrix -Compress
|
||||||
|
Write-Output ('matrix=' + $matrixOut) >> $env:GITHUB_OUTPUT
|
||||||
|
|
||||||
|
build_wheels:
|
||||||
|
name: ${{ matrix.os }} ${{ matrix.pyver }} CUDA ${{ matrix.cuda }}
|
||||||
|
needs: define_matrix
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
strategy:
|
||||||
|
matrix: ${{ fromJSON(needs.define_matrix.outputs.matrix) }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
PCKGVER: ${{ inputs.version }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
repository: 'oobabooga/text-generation-webui'
|
||||||
|
ref: ${{ inputs.version }}
|
||||||
|
submodules: 'recursive'
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.pyver }}
|
||||||
|
|
||||||
|
- name: Build Package
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
VERSION_CLEAN="${{ inputs.version }}"
|
||||||
|
VERSION_CLEAN="${VERSION_CLEAN#v}"
|
||||||
|
cd ..
|
||||||
|
cp -r text-generation-webui "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
|
||||||
|
# Remove extensions that need additional requirements
|
||||||
|
allowed=("character_bias" "gallery" "openai" "sd_api_pictures")
|
||||||
|
find extensions/ -mindepth 1 -maxdepth 1 -type d | grep -v -E "$(printf '%s|' "${allowed[@]}" | sed 's/|$//')" | xargs rm -rf
|
||||||
|
|
||||||
|
# Define common variables
|
||||||
|
CUDA_VERSION="${{ matrix.cuda }}"
|
||||||
|
VERSION="${{ inputs.version }}"
|
||||||
|
|
||||||
|
# 1. Set platform-specific variables
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
PLATFORM="windows"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-pc-windows-msvc-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/python.exe -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/Lib/site-packages"
|
||||||
|
rm start_linux.sh start_macos.sh
|
||||||
|
else
|
||||||
|
PLATFORM="linux"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-unknown-linux-gnu-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/bin/python -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/lib/python3.13/site-packages"
|
||||||
|
rm start_macos.sh start_windows.bat
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 2. Download and extract Python
|
||||||
|
cd ..
|
||||||
|
echo "Downloading Python for $PLATFORM..."
|
||||||
|
curl -L -o python-build.tar.gz "$PYTHON_URL"
|
||||||
|
tar -xzf python-build.tar.gz
|
||||||
|
mv python "text-generation-webui-${VERSION_CLEAN}/portable_env"
|
||||||
|
|
||||||
|
# 3. Prepare requirements file based on CUDA version
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
if [[ "$CUDA_VERSION" == "13.1" ]]; then
|
||||||
|
REQ_FILE="requirements/portable/requirements_cuda131.txt"
|
||||||
|
else
|
||||||
|
REQ_FILE="requirements/portable/requirements.txt"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 4. Install packages
|
||||||
|
echo "Installing Python packages from $REQ_FILE..."
|
||||||
|
$PIP_PATH install --target="./$PACKAGES_PATH" -r "$REQ_FILE"
|
||||||
|
|
||||||
|
# 5. Clean up
|
||||||
|
rm -rf .git cmd* update_wizard* Colab-TextGen-GPU.ipynb docker setup.cfg .github .gitignore requirements/ one_click.py
|
||||||
|
|
||||||
|
# 6. Create archive
|
||||||
|
cd ..
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}-cuda${CUDA_VERSION}.zip"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
powershell -Command "Compress-Archive -Path text-generation-webui-${VERSION_CLEAN} -DestinationPath $ARCHIVE_NAME"
|
||||||
|
else
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}-cuda${CUDA_VERSION}.tar.gz"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
tar czf "$ARCHIVE_NAME" "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload files to a GitHub release
|
||||||
|
id: upload-release
|
||||||
|
uses: svenstaro/upload-release-action@2.7.0
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
file: ../textgen-portable-*
|
||||||
|
tag: ${{ inputs.version }}
|
||||||
|
file_glob: true
|
||||||
|
make_latest: false
|
||||||
|
overwrite: true
|
||||||
170
.github/workflows/build-portable-release-rocm.yml
vendored
Normal file
170
.github/workflows/build-portable-release-rocm.yml
vendored
Normal file
|
|
@ -0,0 +1,170 @@
|
||||||
|
name: Build ROCm
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Override configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
define_matrix:
|
||||||
|
name: Define Build Matrix
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
CONFIGIN: ${{ inputs.config }}
|
||||||
|
EXCLUDEIN: ${{ inputs.exclude }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Define Job Output
|
||||||
|
id: set-matrix
|
||||||
|
run: |
|
||||||
|
$matrix = @{
|
||||||
|
'os' = @('ubuntu-22.04', 'windows-2022')
|
||||||
|
'pyver' = @("3.13")
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($env:CONFIGIN -ne 'Default') {$env:CONFIGIN.split(';').foreach({$matrix[$_.split(':')[0]] = $_.split(':')[1].split(',')})}
|
||||||
|
|
||||||
|
if ($env:EXCLUDEIN -ne 'None') {
|
||||||
|
$exclusions = @()
|
||||||
|
$exclusions += $env:EXCLUDEIN.split(';').replace(':','=').replace(',',"`n") | ConvertFrom-StringData
|
||||||
|
$matrix['exclude'] = $exclusions
|
||||||
|
}
|
||||||
|
|
||||||
|
$matrixOut = ConvertTo-Json $matrix -Compress
|
||||||
|
Write-Output ('matrix=' + $matrixOut) >> $env:GITHUB_OUTPUT
|
||||||
|
|
||||||
|
build_wheels:
|
||||||
|
name: ${{ matrix.os }} ${{ matrix.pyver }}
|
||||||
|
needs: define_matrix
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
strategy:
|
||||||
|
matrix: ${{ fromJSON(needs.define_matrix.outputs.matrix) }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
PCKGVER: ${{ inputs.version }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
repository: 'oobabooga/text-generation-webui'
|
||||||
|
ref: ${{ inputs.version }}
|
||||||
|
submodules: 'recursive'
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.pyver }}
|
||||||
|
|
||||||
|
- name: Build Package
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
VERSION_CLEAN="${{ inputs.version }}"
|
||||||
|
VERSION_CLEAN="${VERSION_CLEAN#v}"
|
||||||
|
cd ..
|
||||||
|
cp -r text-generation-webui "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
|
||||||
|
# Remove extensions that need additional requirements
|
||||||
|
allowed=("character_bias" "gallery" "openai" "sd_api_pictures")
|
||||||
|
find extensions/ -mindepth 1 -maxdepth 1 -type d | grep -v -E "$(printf '%s|' "${allowed[@]}" | sed 's/|$//')" | xargs rm -rf
|
||||||
|
|
||||||
|
# Define common variables
|
||||||
|
VERSION="${{ inputs.version }}"
|
||||||
|
|
||||||
|
# 1. Set platform-specific variables
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
PLATFORM="windows"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-pc-windows-msvc-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/python.exe -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/Lib/site-packages"
|
||||||
|
rm start_linux.sh start_macos.sh
|
||||||
|
else
|
||||||
|
PLATFORM="linux"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-unknown-linux-gnu-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/bin/python -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/lib/python3.13/site-packages"
|
||||||
|
rm start_macos.sh start_windows.bat
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 2. Download and extract Python
|
||||||
|
cd ..
|
||||||
|
echo "Downloading Python for $PLATFORM..."
|
||||||
|
curl -L -o python-build.tar.gz "$PYTHON_URL"
|
||||||
|
tar -xzf python-build.tar.gz
|
||||||
|
mv python "text-generation-webui-${VERSION_CLEAN}/portable_env"
|
||||||
|
|
||||||
|
# 3. Prepare requirements file
|
||||||
|
REQ_FILE="requirements/portable/requirements_amd.txt"
|
||||||
|
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
|
||||||
|
# 4. Install packages
|
||||||
|
echo "Installing Python packages from $REQ_FILE..."
|
||||||
|
$PIP_PATH install --target="./$PACKAGES_PATH" -r "$REQ_FILE"
|
||||||
|
|
||||||
|
# 5. Clean up
|
||||||
|
rm -rf .git cmd* update_wizard* Colab-TextGen-GPU.ipynb docker setup.cfg .github .gitignore requirements/ one_click.py
|
||||||
|
|
||||||
|
# 6. Create archive
|
||||||
|
cd ..
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}-rocm7.2.zip"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
powershell -Command "Compress-Archive -Path text-generation-webui-${VERSION_CLEAN} -DestinationPath $ARCHIVE_NAME"
|
||||||
|
else
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}-rocm7.2.tar.gz"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
tar czf "$ARCHIVE_NAME" "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload files to a GitHub release
|
||||||
|
id: upload-release
|
||||||
|
uses: svenstaro/upload-release-action@2.7.0
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
file: ../textgen-portable-*
|
||||||
|
tag: ${{ inputs.version }}
|
||||||
|
file_glob: true
|
||||||
|
make_latest: false
|
||||||
|
overwrite: true
|
||||||
170
.github/workflows/build-portable-release-vulkan.yml
vendored
Normal file
170
.github/workflows/build-portable-release-vulkan.yml
vendored
Normal file
|
|
@ -0,0 +1,170 @@
|
||||||
|
name: Build Vulkan
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Override configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
define_matrix:
|
||||||
|
name: Define Build Matrix
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
CONFIGIN: ${{ inputs.config }}
|
||||||
|
EXCLUDEIN: ${{ inputs.exclude }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Define Job Output
|
||||||
|
id: set-matrix
|
||||||
|
run: |
|
||||||
|
$matrix = @{
|
||||||
|
'os' = @('ubuntu-22.04', 'windows-2022')
|
||||||
|
'pyver' = @("3.13")
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($env:CONFIGIN -ne 'Default') {$env:CONFIGIN.split(';').foreach({$matrix[$_.split(':')[0]] = $_.split(':')[1].split(',')})}
|
||||||
|
|
||||||
|
if ($env:EXCLUDEIN -ne 'None') {
|
||||||
|
$exclusions = @()
|
||||||
|
$exclusions += $env:EXCLUDEIN.split(';').replace(':','=').replace(',',"`n") | ConvertFrom-StringData
|
||||||
|
$matrix['exclude'] = $exclusions
|
||||||
|
}
|
||||||
|
|
||||||
|
$matrixOut = ConvertTo-Json $matrix -Compress
|
||||||
|
Write-Output ('matrix=' + $matrixOut) >> $env:GITHUB_OUTPUT
|
||||||
|
|
||||||
|
build_wheels:
|
||||||
|
name: ${{ matrix.os }} ${{ matrix.pyver }}
|
||||||
|
needs: define_matrix
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
strategy:
|
||||||
|
matrix: ${{ fromJSON(needs.define_matrix.outputs.matrix) }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
PCKGVER: ${{ inputs.version }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
repository: 'oobabooga/text-generation-webui'
|
||||||
|
ref: ${{ inputs.version }}
|
||||||
|
submodules: 'recursive'
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.pyver }}
|
||||||
|
|
||||||
|
- name: Build Package
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
VERSION_CLEAN="${{ inputs.version }}"
|
||||||
|
VERSION_CLEAN="${VERSION_CLEAN#v}"
|
||||||
|
cd ..
|
||||||
|
cp -r text-generation-webui "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
|
||||||
|
# Remove extensions that need additional requirements
|
||||||
|
allowed=("character_bias" "gallery" "openai" "sd_api_pictures")
|
||||||
|
find extensions/ -mindepth 1 -maxdepth 1 -type d | grep -v -E "$(printf '%s|' "${allowed[@]}" | sed 's/|$//')" | xargs rm -rf
|
||||||
|
|
||||||
|
# Define common variables
|
||||||
|
VERSION="${{ inputs.version }}"
|
||||||
|
|
||||||
|
# 1. Set platform-specific variables
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
PLATFORM="windows"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-pc-windows-msvc-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/python.exe -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/Lib/site-packages"
|
||||||
|
rm start_linux.sh start_macos.sh
|
||||||
|
else
|
||||||
|
PLATFORM="linux"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-unknown-linux-gnu-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/bin/python -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/lib/python3.13/site-packages"
|
||||||
|
rm start_macos.sh start_windows.bat
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 2. Download and extract Python
|
||||||
|
cd ..
|
||||||
|
echo "Downloading Python for $PLATFORM..."
|
||||||
|
curl -L -o python-build.tar.gz "$PYTHON_URL"
|
||||||
|
tar -xzf python-build.tar.gz
|
||||||
|
mv python "text-generation-webui-${VERSION_CLEAN}/portable_env"
|
||||||
|
|
||||||
|
# 3. Prepare requirements file
|
||||||
|
REQ_FILE="requirements/portable/requirements_vulkan.txt"
|
||||||
|
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
|
||||||
|
# 4. Install packages
|
||||||
|
echo "Installing Python packages from $REQ_FILE..."
|
||||||
|
$PIP_PATH install --target="./$PACKAGES_PATH" -r "$REQ_FILE"
|
||||||
|
|
||||||
|
# 5. Clean up
|
||||||
|
rm -rf .git cmd* update_wizard* Colab-TextGen-GPU.ipynb docker setup.cfg .github .gitignore requirements/ one_click.py
|
||||||
|
|
||||||
|
# 6. Create archive
|
||||||
|
cd ..
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}-vulkan.zip"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
powershell -Command "Compress-Archive -Path text-generation-webui-${VERSION_CLEAN} -DestinationPath $ARCHIVE_NAME"
|
||||||
|
else
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}-vulkan.tar.gz"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
tar czf "$ARCHIVE_NAME" "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload files to a GitHub release
|
||||||
|
id: upload-release
|
||||||
|
uses: svenstaro/upload-release-action@2.7.0
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
file: ../textgen-portable-*
|
||||||
|
tag: ${{ inputs.version }}
|
||||||
|
file_glob: true
|
||||||
|
make_latest: false
|
||||||
|
overwrite: true
|
||||||
196
.github/workflows/build-portable-release.yml
vendored
Normal file
196
.github/workflows/build-portable-release.yml
vendored
Normal file
|
|
@ -0,0 +1,196 @@
|
||||||
|
name: Build CPU and macOS
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Override configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version tag of text-generation-webui to build: v3.0'
|
||||||
|
default: 'v3.0'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
description: 'Configurations to build: key1:item1-1,item1-2;key2:item2-1,item2-2'
|
||||||
|
default: 'Default'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
exclude:
|
||||||
|
description: 'Exclude build configurations: key1-1:item1-1,key1-2:item1-2;key2-1:item2-1,key2-2:item2-2'
|
||||||
|
default: 'None'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
define_matrix:
|
||||||
|
name: Define Build Matrix
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
CONFIGIN: ${{ inputs.config }}
|
||||||
|
EXCLUDEIN: ${{ inputs.exclude }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Define Job Output
|
||||||
|
id: set-matrix
|
||||||
|
run: |
|
||||||
|
$matrix = @{
|
||||||
|
'os' = @('ubuntu-22.04', 'windows-2022', 'macos-14')
|
||||||
|
'pyver' = @("3.13")
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($env:CONFIGIN -ne 'Default') {$env:CONFIGIN.split(';').foreach({$matrix[$_.split(':')[0]] = $_.split(':')[1].split(',')})}
|
||||||
|
|
||||||
|
if ($env:EXCLUDEIN -ne 'None') {
|
||||||
|
$exclusions = @()
|
||||||
|
$exclusions += $env:EXCLUDEIN.split(';').replace(':','=').replace(',',"`n") | ConvertFrom-StringData
|
||||||
|
$matrix['exclude'] = $exclusions
|
||||||
|
}
|
||||||
|
|
||||||
|
$matrixOut = ConvertTo-Json $matrix -Compress
|
||||||
|
Write-Output ('matrix=' + $matrixOut) >> $env:GITHUB_OUTPUT
|
||||||
|
|
||||||
|
build_wheels:
|
||||||
|
name: ${{ matrix.os }} ${{ matrix.pyver }}
|
||||||
|
needs: define_matrix
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
strategy:
|
||||||
|
matrix: ${{ fromJSON(needs.define_matrix.outputs.matrix) }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: pwsh
|
||||||
|
env:
|
||||||
|
PCKGVER: ${{ inputs.version }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
repository: 'oobabooga/text-generation-webui'
|
||||||
|
ref: ${{ inputs.version }}
|
||||||
|
submodules: 'recursive'
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.pyver }}
|
||||||
|
|
||||||
|
- name: Build Package
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
VERSION_CLEAN="${{ inputs.version }}"
|
||||||
|
VERSION_CLEAN="${VERSION_CLEAN#v}"
|
||||||
|
cd ..
|
||||||
|
cp -r text-generation-webui "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
|
||||||
|
# Remove extensions that need additional requirements
|
||||||
|
allowed=("character_bias" "gallery" "openai" "sd_api_pictures")
|
||||||
|
find extensions/ -mindepth 1 -maxdepth 1 -type d | grep -v -E "$(printf '%s|' "${allowed[@]}" | sed 's/|$//')" | xargs rm -rf
|
||||||
|
|
||||||
|
# Define common variables
|
||||||
|
VERSION="${{ inputs.version }}"
|
||||||
|
OS_TYPE="${{ matrix.os }}"
|
||||||
|
|
||||||
|
# 1. Set platform-specific variables
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
PLATFORM="windows-cpu"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-pc-windows-msvc-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/python.exe -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/Lib/site-packages"
|
||||||
|
rm start_linux.sh start_macos.sh
|
||||||
|
elif [[ "$RUNNER_OS" == "macOS" ]]; then
|
||||||
|
if [[ "$OS_TYPE" == "macos-15-intel" ]]; then
|
||||||
|
PLATFORM="macos-x86_64"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-apple-darwin-install_only.tar.gz"
|
||||||
|
REQ_TYPE="apple_intel"
|
||||||
|
else
|
||||||
|
PLATFORM="macos-arm64"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-aarch64-apple-darwin-install_only.tar.gz"
|
||||||
|
REQ_TYPE="apple_silicon"
|
||||||
|
fi
|
||||||
|
PIP_PATH="portable_env/bin/python -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/lib/python3.13/site-packages"
|
||||||
|
rm start_linux.sh start_windows.bat
|
||||||
|
else
|
||||||
|
# Linux case
|
||||||
|
PLATFORM="linux-cpu"
|
||||||
|
PYTHON_URL="https://github.com/astral-sh/python-build-standalone/releases/download/20260303/cpython-3.13.12+20260303-x86_64-unknown-linux-gnu-install_only.tar.gz"
|
||||||
|
PIP_PATH="portable_env/bin/python -m pip"
|
||||||
|
PACKAGES_PATH="portable_env/lib/python3.13/site-packages"
|
||||||
|
rm start_macos.sh start_windows.bat
|
||||||
|
fi
|
||||||
|
|
||||||
|
# 2. Download and extract Python
|
||||||
|
echo "Downloading Python for $PLATFORM..."
|
||||||
|
cd ..
|
||||||
|
curl -L -o python-build.tar.gz "$PYTHON_URL"
|
||||||
|
tar -xzf python-build.tar.gz
|
||||||
|
mv python "text-generation-webui-${VERSION_CLEAN}/portable_env"
|
||||||
|
|
||||||
|
# 3. Prepare requirements file based on platform
|
||||||
|
cd "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
|
||||||
|
# Select requirements file based on platform
|
||||||
|
if [[ "$RUNNER_OS" == "macOS" ]]; then
|
||||||
|
if [[ "$OS_TYPE" == "macos-15-intel" ]]; then
|
||||||
|
REQ_FILE="requirements/portable/requirements_apple_intel.txt"
|
||||||
|
else
|
||||||
|
REQ_FILE="requirements/portable/requirements_apple_silicon.txt"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
REQ_FILE="requirements/portable/requirements_cpu_only.txt"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Using requirements file: $REQ_FILE"
|
||||||
|
|
||||||
|
# 4. Install packages
|
||||||
|
echo "Installing Python packages from $REQ_FILE..."
|
||||||
|
$PIP_PATH install --target="./$PACKAGES_PATH" -r "$REQ_FILE"
|
||||||
|
|
||||||
|
# 5. Clean up
|
||||||
|
rm -rf .git cmd* update_wizard* Colab-TextGen-GPU.ipynb docker setup.cfg .github .gitignore requirements/ one_click.py
|
||||||
|
|
||||||
|
# 6. Create archive
|
||||||
|
cd ..
|
||||||
|
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}.zip"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
powershell -Command "Compress-Archive -Path text-generation-webui-${VERSION_CLEAN} -DestinationPath $ARCHIVE_NAME"
|
||||||
|
else
|
||||||
|
ARCHIVE_NAME="textgen-portable-${VERSION_CLEAN}-${PLATFORM}.tar.gz"
|
||||||
|
echo "Creating archive: $ARCHIVE_NAME"
|
||||||
|
tar czf "$ARCHIVE_NAME" "text-generation-webui-${VERSION_CLEAN}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload files to a GitHub release
|
||||||
|
id: upload-release
|
||||||
|
uses: svenstaro/upload-release-action@2.7.0
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
file: ../textgen-portable-*
|
||||||
|
tag: ${{ inputs.version }}
|
||||||
|
file_glob: true
|
||||||
|
make_latest: false
|
||||||
|
overwrite: true
|
||||||
22
.github/workflows/stale.yml
vendored
22
.github/workflows/stale.yml
vendored
|
|
@ -1,22 +0,0 @@
|
||||||
name: Close inactive issues
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "10 23 * * *"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
close-issues:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/stale@v5
|
|
||||||
with:
|
|
||||||
stale-issue-message: ""
|
|
||||||
close-issue-message: "This issue has been closed due to inactivity for 6 weeks. If you believe it is still relevant, please leave a comment below. You can tag a developer in your comment."
|
|
||||||
days-before-issue-stale: 42
|
|
||||||
days-before-issue-close: 0
|
|
||||||
stale-issue-label: "stale"
|
|
||||||
days-before-pr-stale: -1
|
|
||||||
days-before-pr-close: -1
|
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
61
.gitignore
vendored
61
.gitignore
vendored
|
|
@ -1,38 +1,33 @@
|
||||||
cache
|
/css
|
||||||
characters
|
/extensions
|
||||||
training/datasets
|
/installer_files
|
||||||
extensions/silero_tts/outputs
|
/repositories
|
||||||
extensions/elevenlabs_tts/outputs
|
/user_data
|
||||||
extensions/sd_api_pictures/outputs
|
|
||||||
extensions/multimodal/pipelines
|
.chroma
|
||||||
logs
|
.DS_Store
|
||||||
loras
|
.eslintrc.js
|
||||||
models
|
.idea
|
||||||
presets
|
.installer_state.json
|
||||||
repositories
|
.venv
|
||||||
softprompts
|
venv
|
||||||
torch-dumps
|
.envrc
|
||||||
*pycache*
|
.direnv
|
||||||
*/*pycache*
|
.vs
|
||||||
*/*/pycache*
|
|
||||||
venv/
|
|
||||||
.venv/
|
|
||||||
.vscode
|
.vscode
|
||||||
.idea/
|
|
||||||
*.bak
|
*.bak
|
||||||
*.ipynb
|
*.ipynb
|
||||||
*.log
|
*.log
|
||||||
|
*pycache*
|
||||||
settings.json
|
cert.pem
|
||||||
settings.yaml
|
key.pem
|
||||||
notification.mp3
|
package.json
|
||||||
img_bot*
|
package-lock.json
|
||||||
img_me*
|
|
||||||
prompts/[0-9]*
|
|
||||||
models/config-user.yaml
|
|
||||||
|
|
||||||
.DS_Store
|
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
.chroma
|
wandb
|
||||||
installer_files
|
|
||||||
/CMD_FLAGS.txt
|
# ignore user docker config and top level links to docker files
|
||||||
|
/docker-compose.yaml
|
||||||
|
/docker-compose.yml
|
||||||
|
/Dockerfile
|
||||||
|
.env
|
||||||
|
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
||||||
# Only used by the one-click installer.
|
|
||||||
# Example:
|
|
||||||
# --listen --api
|
|
||||||
119
Colab-TextGen-GPU.ipynb
Normal file
119
Colab-TextGen-GPU.ipynb
Normal file
|
|
@ -0,0 +1,119 @@
|
||||||
|
{
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 0,
|
||||||
|
"metadata": {
|
||||||
|
"colab": {
|
||||||
|
"private_outputs": true,
|
||||||
|
"provenance": [],
|
||||||
|
"gpuType": "T4"
|
||||||
|
},
|
||||||
|
"kernelspec": {
|
||||||
|
"name": "python3",
|
||||||
|
"display_name": "Python 3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"name": "python"
|
||||||
|
},
|
||||||
|
"accelerator": "GPU"
|
||||||
|
},
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"source": [
|
||||||
|
"# oobabooga/text-generation-webui\n",
|
||||||
|
"\n",
|
||||||
|
"After running both cells, a public gradio URL will appear at the bottom in around 10 minutes. You can optionally generate an API link.\n",
|
||||||
|
"\n",
|
||||||
|
"* Project page: https://github.com/oobabooga/text-generation-webui\n",
|
||||||
|
"* Gradio server status: https://status.gradio.app/"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"id": "MFQl6-FjSYtY"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"#@title 1. Keep this tab alive to prevent Colab from disconnecting you { display-mode: \"form\" }\n",
|
||||||
|
"\n",
|
||||||
|
"#@markdown Press play on the music player that will appear below:\n",
|
||||||
|
"%%html\n",
|
||||||
|
"<audio src=\"https://oobabooga.github.io/silence.m4a\" controls>"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"id": "f7TVVj_z4flw"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"#@title 2. Launch the web UI\n",
|
||||||
|
"\n",
|
||||||
|
"#@markdown You can provide a direct GGUF link or a Hugging Face model URL.\n",
|
||||||
|
"\n",
|
||||||
|
"import os\n",
|
||||||
|
"from pathlib import Path\n",
|
||||||
|
"\n",
|
||||||
|
"os.environ.pop('PYTHONPATH', None)\n",
|
||||||
|
"os.environ.pop('MPLBACKEND', None)\n",
|
||||||
|
"\n",
|
||||||
|
"if Path.cwd().name != 'text-generation-webui':\n",
|
||||||
|
" print(\"\\033[1;32;1m\\n --> Installing the web UI. This will take a while, but after the initial setup, you can download and test as many models as you like.\\033[0;37;0m\\n\")\n",
|
||||||
|
"\n",
|
||||||
|
" !git clone https://github.com/oobabooga/text-generation-webui\n",
|
||||||
|
" %cd text-generation-webui\n",
|
||||||
|
"\n",
|
||||||
|
" # Install the project in an isolated environment\n",
|
||||||
|
" !GPU_CHOICE=A \\\n",
|
||||||
|
" LAUNCH_AFTER_INSTALL=FALSE \\\n",
|
||||||
|
" INSTALL_EXTENSIONS=FALSE \\\n",
|
||||||
|
" ./start_linux.sh\n",
|
||||||
|
"\n",
|
||||||
|
"# Parameters\n",
|
||||||
|
"model_url = \"https://huggingface.co/unsloth/Qwen3.5-9B-GGUF/resolve/main/Qwen3.5-9B-Q4_K_M.gguf\" #@param {type:\"string\"}\n",
|
||||||
|
"branch = \"\" #@param {type:\"string\"}\n",
|
||||||
|
"command_line_flags = \"--load-in-4bit --use_double_quant\" #@param {type:\"string\"}\n",
|
||||||
|
"api = False #@param {type:\"boolean\"}\n",
|
||||||
|
"\n",
|
||||||
|
"if api:\n",
|
||||||
|
" for param in ['--api', '--public-api']:\n",
|
||||||
|
" if param not in command_line_flags:\n",
|
||||||
|
" command_line_flags += f\" {param}\"\n",
|
||||||
|
"\n",
|
||||||
|
"model_url = model_url.strip()\n",
|
||||||
|
"model_name = \"\"\n",
|
||||||
|
"if model_url != \"\":\n",
|
||||||
|
" if not model_url.startswith('http'):\n",
|
||||||
|
" model_url = 'https://huggingface.co/' + model_url\n",
|
||||||
|
"\n",
|
||||||
|
" branch = branch.strip()\n",
|
||||||
|
" if '/resolve/' in model_url:\n",
|
||||||
|
" model_name = model_url.split('?')[0].split('/')[-1]\n",
|
||||||
|
" !python download-model.py {model_url}\n",
|
||||||
|
" else:\n",
|
||||||
|
" url_parts = model_url.strip('/').split('/')\n",
|
||||||
|
" model_name = f\"{url_parts[-2]}_{url_parts[-1]}\"\n",
|
||||||
|
" if branch not in ['', 'main']:\n",
|
||||||
|
" model_name += f\"_{branch}\"\n",
|
||||||
|
" !python download-model.py {model_url} --branch {branch}\n",
|
||||||
|
" else:\n",
|
||||||
|
" !python download-model.py {model_url}\n",
|
||||||
|
"\n",
|
||||||
|
"# Start the web UI\n",
|
||||||
|
"cmd = f\"./start_linux.sh {command_line_flags} --share\"\n",
|
||||||
|
"if model_name != \"\":\n",
|
||||||
|
" cmd += f\" --model {model_name}\"\n",
|
||||||
|
"\n",
|
||||||
|
"!$cmd"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"id": "LGQ8BiMuXMDG",
|
||||||
|
"cellView": "form"
|
||||||
|
},
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
672
README.md
672
README.md
|
|
@ -1,89 +1,142 @@
|
||||||
**Breaking change: WebUI now uses PyTorch 2.1.**
|
<div align="center" markdown="1">
|
||||||
|
<sup>Special thanks to:</sup>
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
<a href="https://go.warp.dev/text-generation-webui">
|
||||||
|
<img alt="Warp sponsorship" width="400" src="https://raw.githubusercontent.com/warpdotdev/brand-assets/refs/heads/main/Github/Sponsor/Warp-Github-LG-02.png">
|
||||||
|
</a>
|
||||||
|
|
||||||
* For one-click installer users: If you encounter problems after updating, rerun the update script. If issues persist, delete the `installer_files` folder and use the start script to reinstall requirements.
|
### [Warp, built for coding with multiple AI agents](https://go.warp.dev/text-generation-webui)
|
||||||
* For manual installations, update PyTorch with the [provided command](https://github.com/oobabooga/text-generation-webui/#2-install-pytorch).
|
[Available for macOS, Linux, & Windows](https://go.warp.dev/text-generation-webui)<br>
|
||||||
|
</div>
|
||||||
|
<hr>
|
||||||
|
|
||||||
# Text generation web UI
|
# Text Generation Web UI
|
||||||
|
|
||||||
A Gradio web UI for Large Language Models.
|
A Gradio web UI for running Large Language Models locally. 100% private and offline. Supports text generation, vision, tool-calling, training, image generation, and more.
|
||||||
|
|
||||||
Its goal is to become the [AUTOMATIC1111/stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui) of text generation.
|
[Try the Deep Reason extension](https://oobabooga.gumroad.com/l/deep_reason)
|
||||||
|
|
||||||
| |  |
|
| |  |
|
||||||
|:---:|:---:|
|
|:---:|:---:|
|
||||||
| |  |
|
| |  |
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
* 3 interface modes: default (two columns), notebook, and chat
|
- **Multiple backends**: [llama.cpp](https://github.com/ggerganov/llama.cpp), [Transformers](https://github.com/huggingface/transformers), [ExLlamaV3](https://github.com/turboderp-org/exllamav3), and [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM). Switch between backends and models without restarting.
|
||||||
* Multiple model backends: [transformers](https://github.com/huggingface/transformers), [llama.cpp](https://github.com/ggerganov/llama.cpp), [ExLlama](https://github.com/turboderp/exllama), [ExLlamaV2](https://github.com/turboderp/exllamav2), [AutoGPTQ](https://github.com/PanQiWei/AutoGPTQ), [GPTQ-for-LLaMa](https://github.com/qwopqwop200/GPTQ-for-LLaMa), [CTransformers](https://github.com/marella/ctransformers), [AutoAWQ](https://github.com/casper-hansen/AutoAWQ)
|
- **File attachments**: Upload text files, PDF documents, and .docx documents to talk about their contents.
|
||||||
* Dropdown menu for quickly switching between different models
|
- **Vision (multimodal)**: Attach images to messages for visual understanding ([tutorial](https://github.com/oobabooga/text-generation-webui/wiki/Multimodal-Tutorial)).
|
||||||
* LoRA: load and unload LoRAs on the fly, train a new LoRA using QLoRA
|
- **Tool-calling**: Models can call custom functions during chat — web search, page fetching, math, and more. Each tool is a single `.py` file, easy to create and extend ([tutorial](https://github.com/oobabooga/text-generation-webui/wiki/Tool-Calling-Tutorial)).
|
||||||
* Precise instruction templates for chat mode, including Llama-2-chat, Alpaca, Vicuna, WizardLM, StableLM, and many others
|
- **OpenAI-compatible API**: Chat and Completions endpoints with tool-calling support. Use as a local drop-in replacement for the OpenAI API ([examples](https://github.com/oobabooga/text-generation-webui/wiki/12-%E2%80%90-OpenAI-API#examples)).
|
||||||
* 4-bit, 8-bit, and CPU inference through the transformers library
|
- **Training**: Fine-tune LoRAs on multi-turn chat or raw text datasets. Supports resuming interrupted runs ([tutorial](https://github.com/oobabooga/text-generation-webui/wiki/05-%E2%80%90-Training-Tab)).
|
||||||
* Use llama.cpp models with transformers samplers (`llamacpp_HF` loader)
|
- **Image generation**: A dedicated tab for `diffusers` models like **Z-Image-Turbo**. Features 4-bit/8-bit quantization and a persistent gallery with metadata ([tutorial](https://github.com/oobabooga/text-generation-webui/wiki/Image-Generation-Tutorial)).
|
||||||
* [Multimodal pipelines, including LLaVA and MiniGPT-4](https://github.com/oobabooga/text-generation-webui/tree/main/extensions/multimodal)
|
- **Easy setup**: [Portable builds](https://github.com/oobabooga/text-generation-webui/releases) (zero setup, just unzip and run) for GGUF models on Windows/Linux/macOS, or a one-click installer for the full feature set.
|
||||||
* [Extensions framework](docs/Extensions.md)
|
- 100% offline and private, with zero telemetry, external resources, or remote update requests.
|
||||||
* [Custom chat characters](docs/Chat-mode.md)
|
- `instruct` mode for instruction-following (like ChatGPT), and `chat-instruct`/`chat` modes for talking to custom characters. Prompts are automatically formatted with Jinja2 templates.
|
||||||
* Very efficient text streaming
|
- Edit messages, navigate between message versions, and branch conversations at any point.
|
||||||
* Markdown output with LaTeX rendering, to use for instance with [GALACTICA](https://github.com/paperswithcode/galai)
|
- Free-form text generation in the Notebook tab without being limited to chat turns.
|
||||||
* API, including endpoints for websocket streaming ([see the examples](https://github.com/oobabooga/text-generation-webui/blob/main/api-examples))
|
- Multiple sampling parameters and generation options for sophisticated text generation control.
|
||||||
|
- Aesthetic UI with dark and light themes.
|
||||||
|
- Syntax highlighting for code blocks and LaTeX rendering for mathematical expressions.
|
||||||
|
- Extension support, with numerous built-in and user-contributed extensions available. See the [wiki](https://github.com/oobabooga/text-generation-webui/wiki/07-%E2%80%90-Extensions) and [extensions directory](https://github.com/oobabooga/text-generation-webui-extensions) for details.
|
||||||
|
|
||||||
To learn how to use the various features, check out the Documentation: https://github.com/oobabooga/text-generation-webui/tree/main/docs
|
## How to install
|
||||||
|
|
||||||
## Installation
|
#### ✅ Option 1: Portable builds (get started in 1 minute)
|
||||||
|
|
||||||
### One-click installers
|
No installation needed – just download, unzip and run. All dependencies included.
|
||||||
|
|
||||||
1) Clone or download the repository.
|
Download from here: **https://github.com/oobabooga/text-generation-webui/releases**
|
||||||
2) Run the `start_linux.sh`, `start_windows.bat`, `start_macos.sh`, or `start_wsl.bat` script depending on your OS.
|
|
||||||
3) Select your GPU vendor when asked.
|
|
||||||
4) Have fun!
|
|
||||||
|
|
||||||
#### How it works
|
- Builds are provided for Linux, Windows, and macOS, with options for CUDA, Vulkan, ROCm, and CPU-only.
|
||||||
|
- Compatible with GGUF (llama.cpp) models.
|
||||||
|
|
||||||
The script creates a folder called `installer_files` where it sets up a Conda environment using Miniconda. The installation is self-contained: if you want to reinstall, just delete `installer_files` and run the start script again.
|
#### Option 2: Manual portable install with venv
|
||||||
|
|
||||||
To launch the webui in the future after it is already installed, run the same `start` script.
|
Very fast setup that should work on any Python 3.9+:
|
||||||
|
|
||||||
#### Getting updates
|
```bash
|
||||||
|
# Clone repository
|
||||||
|
git clone https://github.com/oobabooga/text-generation-webui
|
||||||
|
cd text-generation-webui
|
||||||
|
|
||||||
Run `update_linux.sh`, `update_windows.bat`, `update_macos.sh`, or `update_wsl.bat`.
|
# Create virtual environment
|
||||||
|
python -m venv venv
|
||||||
|
|
||||||
#### Running commands
|
# Activate virtual environment
|
||||||
|
# On Windows:
|
||||||
|
venv\Scripts\activate
|
||||||
|
# On macOS/Linux:
|
||||||
|
source venv/bin/activate
|
||||||
|
|
||||||
If you ever need to install something manually in the `installer_files` environment, you can launch an interactive shell using the cmd script: `cmd_linux.sh`, `cmd_windows.bat`, `cmd_macos.sh`, or `cmd_wsl.bat`.
|
# Install dependencies (choose appropriate file under requirements/portable for your hardware)
|
||||||
|
pip install -r requirements/portable/requirements.txt --upgrade
|
||||||
|
|
||||||
#### Defining command-line flags
|
# Launch server (basic command)
|
||||||
|
python server.py --portable --api --auto-launch
|
||||||
|
|
||||||
To define persistent command-line flags like `--listen` or `--api`, edit the `CMD_FLAGS.txt` file with a text editor and add them there. Flags can also be provided directly to the start scripts, for instance, `./start-linux.sh --listen`.
|
# When done working, deactivate
|
||||||
|
deactivate
|
||||||
|
```
|
||||||
|
|
||||||
#### Other info
|
#### Option 3: One-click installer
|
||||||
|
|
||||||
* There is no need to run any of those scripts as admin/root.
|
For users who need additional backends (ExLlamaV3, Transformers), training, image generation, or extensions (TTS, voice input, translation, etc). Requires ~10GB disk space and downloads PyTorch.
|
||||||
* For additional instructions about AMD setup, WSL setup, and nvcc installation, consult [this page](https://github.com/oobabooga/text-generation-webui/blob/main/docs/One-Click-Installers.md).
|
|
||||||
* The installer has been tested mostly on NVIDIA GPUs. If you can find a way to improve it for your AMD/Intel Arc/Mac Metal GPU, you are highly encouraged to submit a PR to this repository. The main file to be edited is `one_click.py`.
|
|
||||||
* For automated installation, you can use the `GPU_CHOICE`, `LAUNCH_AFTER_INSTALL`, and `INSTALL_EXTENSIONS` environment variables. For instance: `GPU_CHOICE=A LAUNCH_AFTER_INSTALL=False INSTALL_EXTENSIONS=False ./start_linux.sh`.
|
|
||||||
|
|
||||||
### Manual installation using Conda
|
1. Clone the repository, or [download its source code](https://github.com/oobabooga/text-generation-webui/archive/refs/heads/main.zip) and extract it.
|
||||||
|
2. Run the startup script for your OS: `start_windows.bat`, `start_linux.sh`, or `start_macos.sh`.
|
||||||
|
3. When prompted, select your GPU vendor.
|
||||||
|
4. After installation, open `http://127.0.0.1:7860` in your browser.
|
||||||
|
|
||||||
Recommended if you have some experience with the command-line.
|
To restart the web UI later, run the same `start_` script.
|
||||||
|
|
||||||
|
You can pass command-line flags directly (e.g., `./start_linux.sh --help`), or add them to `user_data/CMD_FLAGS.txt` (e.g., `--api` to enable the API).
|
||||||
|
|
||||||
|
To update, run the update script for your OS: `update_wizard_windows.bat`, `update_wizard_linux.sh`, or `update_wizard_macos.sh`.
|
||||||
|
|
||||||
|
To reinstall with a fresh Python environment, delete the `installer_files` folder and run the `start_` script again.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>
|
||||||
|
One-click installer details
|
||||||
|
</summary>
|
||||||
|
|
||||||
|
### One-click-installer
|
||||||
|
|
||||||
|
The script uses Miniforge to set up a Conda environment in the `installer_files` folder.
|
||||||
|
|
||||||
|
If you ever need to install something manually in the `installer_files` environment, you can launch an interactive shell using the cmd script: `cmd_linux.sh`, `cmd_windows.bat`, or `cmd_macos.sh`.
|
||||||
|
|
||||||
|
* There is no need to run any of those scripts (`start_`, `update_wizard_`, or `cmd_`) as admin/root.
|
||||||
|
* To install requirements for extensions, it is recommended to use the update wizard script with the "Install/update extensions requirements" option. At the end, this script will install the main requirements for the project to make sure that they take precedence in case of version conflicts.
|
||||||
|
* For automated installation, you can use the `GPU_CHOICE`, `LAUNCH_AFTER_INSTALL`, and `INSTALL_EXTENSIONS` environment variables. For instance: `GPU_CHOICE=A LAUNCH_AFTER_INSTALL=FALSE INSTALL_EXTENSIONS=TRUE ./start_linux.sh`.
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>
|
||||||
|
Manual full installation with conda or docker
|
||||||
|
</summary>
|
||||||
|
|
||||||
|
### Full installation with Conda
|
||||||
|
|
||||||
#### 0. Install Conda
|
#### 0. Install Conda
|
||||||
|
|
||||||
https://docs.conda.io/en/latest/miniconda.html
|
https://github.com/conda-forge/miniforge
|
||||||
|
|
||||||
On Linux or WSL, it can be automatically installed with these two commands ([source](https://educe-ubc.github.io/conda.html)):
|
On Linux or WSL, Miniforge can be automatically installed with these two commands:
|
||||||
|
|
||||||
```
|
```
|
||||||
curl -sL "https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh" > "Miniconda3.sh"
|
curl -sL "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh" > "Miniforge3.sh"
|
||||||
bash Miniconda3.sh
|
bash Miniforge3.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
|
For other platforms, download from: https://github.com/conda-forge/miniforge/releases/latest
|
||||||
|
|
||||||
#### 1. Create a new conda environment
|
#### 1. Create a new conda environment
|
||||||
|
|
||||||
```
|
```
|
||||||
conda create -n textgen python=3.10
|
conda create -n textgen python=3.13
|
||||||
conda activate textgen
|
conda activate textgen
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -91,330 +144,323 @@ conda activate textgen
|
||||||
|
|
||||||
| System | GPU | Command |
|
| System | GPU | Command |
|
||||||
|--------|---------|---------|
|
|--------|---------|---------|
|
||||||
| Linux/WSL | NVIDIA | `pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118` |
|
| Linux/WSL | NVIDIA | `pip3 install torch==2.9.1 --index-url https://download.pytorch.org/whl/cu128` |
|
||||||
| Linux/WSL | CPU only | `pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu` |
|
| Linux/WSL | CPU only | `pip3 install torch==2.9.1 --index-url https://download.pytorch.org/whl/cpu` |
|
||||||
| Linux | AMD | `pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm5.6` |
|
| Linux | AMD | `pip3 install https://repo.radeon.com/rocm/manylinux/rocm-rel-7.2/torch-2.9.1%2Brocm7.2.0.lw.git7e1940d4-cp313-cp313-linux_x86_64.whl` |
|
||||||
| MacOS + MPS | Any | `pip3 install torch torchvision torchaudio` |
|
| MacOS + MPS | Any | `pip3 install torch==2.9.1` |
|
||||||
| Windows | NVIDIA | `pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118` |
|
| Windows | NVIDIA | `pip3 install torch==2.9.1 --index-url https://download.pytorch.org/whl/cu128` |
|
||||||
| Windows | CPU only | `pip3 install torch torchvision torchaudio` |
|
| Windows | CPU only | `pip3 install torch==2.9.1` |
|
||||||
|
|
||||||
The up-to-date commands can be found here: https://pytorch.org/get-started/locally/.
|
The up-to-date commands can be found here: https://pytorch.org/get-started/locally/.
|
||||||
|
|
||||||
|
If you need `nvcc` to compile some library manually, you will additionally need to install this:
|
||||||
|
|
||||||
|
```
|
||||||
|
conda install -y -c "nvidia/label/cuda-12.8.1" cuda
|
||||||
|
```
|
||||||
|
|
||||||
#### 3. Install the web UI
|
#### 3. Install the web UI
|
||||||
|
|
||||||
```
|
```
|
||||||
git clone https://github.com/oobabooga/text-generation-webui
|
git clone https://github.com/oobabooga/text-generation-webui
|
||||||
cd text-generation-webui
|
cd text-generation-webui
|
||||||
pip install -r requirements.txt
|
pip install -r requirements/full/<requirements file according to table below>
|
||||||
```
|
```
|
||||||
|
|
||||||
#### AMD, Metal, Intel Arc, and CPUs without AVX2
|
Requirements file to use:
|
||||||
|
|
||||||
1) Replace the last command above with
|
| GPU | requirements file to use |
|
||||||
|
|--------|---------|
|
||||||
```
|
| NVIDIA | `requirements.txt` |
|
||||||
pip install -r requirements_nowheels.txt
|
| AMD | `requirements_amd.txt` |
|
||||||
```
|
| CPU only | `requirements_cpu_only.txt` |
|
||||||
|
| Apple Intel | `requirements_apple_intel.txt` |
|
||||||
2) Manually install llama-cpp-python using the appropriate command for your hardware: [Installation from PyPI](https://github.com/abetlen/llama-cpp-python#installation-from-pypi).
|
| Apple Silicon | `requirements_apple_silicon.txt` |
|
||||||
|
|
||||||
3) Do the same for CTransformers: [Installation](https://github.com/marella/ctransformers#installation).
|
### Start the web UI
|
||||||
|
|
||||||
4) AMD: Manually install AutoGPTQ: [Installation](https://github.com/PanQiWei/AutoGPTQ#installation).
|
|
||||||
|
|
||||||
5) AMD: Manually install [ExLlama](https://github.com/turboderp/exllama) by simply cloning it into the `repositories` folder (it will be automatically compiled at runtime after that):
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
conda activate textgen
|
||||||
cd text-generation-webui
|
cd text-generation-webui
|
||||||
git clone https://github.com/turboderp/exllama repositories/exllama
|
python server.py
|
||||||
```
|
```
|
||||||
|
|
||||||
#### bitsandbytes on older NVIDIA GPUs
|
Then browse to
|
||||||
|
|
||||||
bitsandbytes >= 0.39 may not work. In that case, to use `--load-in-8bit`, you may have to downgrade like this:
|
`http://127.0.0.1:7860`
|
||||||
|
|
||||||
* Linux: `pip install bitsandbytes==0.38.1`
|
#### Manual install
|
||||||
* Windows: `pip install https://github.com/jllllll/bitsandbytes-windows-webui/raw/main/bitsandbytes-0.38.1-py3-none-any.whl`
|
|
||||||
|
The `requirements*.txt` above contain various wheels precompiled through GitHub Actions. If you wish to compile things manually, or if you need to because no suitable wheels are available for your hardware, you can use `requirements_nowheels.txt` and then install your desired loaders manually.
|
||||||
|
|
||||||
### Alternative: Docker
|
### Alternative: Docker
|
||||||
|
|
||||||
```
|
```
|
||||||
ln -s docker/{Dockerfile,docker-compose.yml,.dockerignore} .
|
For NVIDIA GPU:
|
||||||
|
ln -s docker/{nvidia/Dockerfile,nvidia/docker-compose.yml,.dockerignore} .
|
||||||
|
For AMD GPU:
|
||||||
|
ln -s docker/{amd/Dockerfile,amd/docker-compose.yml,.dockerignore} .
|
||||||
|
For Intel GPU:
|
||||||
|
ln -s docker/{intel/Dockerfile,intel/docker-compose.yml,.dockerignore} .
|
||||||
|
For CPU only
|
||||||
|
ln -s docker/{cpu/Dockerfile,cpu/docker-compose.yml,.dockerignore} .
|
||||||
cp docker/.env.example .env
|
cp docker/.env.example .env
|
||||||
# Edit .env and set TORCH_CUDA_ARCH_LIST based on your GPU model
|
# Create logs/cache dir:
|
||||||
|
mkdir -p user_data/logs user_data/cache
|
||||||
|
# Edit .env and set:
|
||||||
|
# TORCH_CUDA_ARCH_LIST based on your GPU model
|
||||||
|
# APP_RUNTIME_GID your host user's group id (run `id -g` in a terminal)
|
||||||
|
# BUILD_EXTENSIONS optionally add a comma-separated list of extensions to build
|
||||||
|
# Edit user_data/CMD_FLAGS.txt and add in it the options you want to execute (like --listen --cpu)
|
||||||
|
#
|
||||||
docker compose up --build
|
docker compose up --build
|
||||||
```
|
```
|
||||||
|
|
||||||
* You need to have docker compose v2.17 or higher installed. See [this guide](https://github.com/oobabooga/text-generation-webui/blob/main/docs/Docker.md) for instructions.
|
* You need to have Docker Compose v2.17 or higher installed. See [this guide](https://github.com/oobabooga/text-generation-webui/wiki/09-%E2%80%90-Docker) for instructions.
|
||||||
* For additional docker files, check out [this repository](https://github.com/Atinoda/text-generation-webui-docker).
|
* For additional docker files, check out [this repository](https://github.com/Atinoda/text-generation-webui-docker).
|
||||||
|
|
||||||
### Updating the requirements
|
### Updating the requirements
|
||||||
|
|
||||||
From time to time, the `requirements.txt` changes. To update, use these commands:
|
From time to time, the `requirements*.txt` change. To update, use these commands:
|
||||||
|
|
||||||
```
|
```
|
||||||
conda activate textgen
|
conda activate textgen
|
||||||
cd text-generation-webui
|
cd text-generation-webui
|
||||||
pip install -r requirements.txt --upgrade
|
pip install -r <requirements file that you have used> --upgrade
|
||||||
```
|
```
|
||||||
|
</details>
|
||||||
## Downloading models
|
|
||||||
|
|
||||||
Models should be placed in the `text-generation-webui/models` folder. They are usually downloaded from [Hugging Face](https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads).
|
|
||||||
|
|
||||||
* Transformers or GPTQ models are made of several files and must be placed in a subfolder. Example:
|
|
||||||
|
|
||||||
```
|
|
||||||
text-generation-webui
|
|
||||||
├── models
|
|
||||||
│ ├── lmsys_vicuna-33b-v1.3
|
|
||||||
│ │ ├── config.json
|
|
||||||
│ │ ├── generation_config.json
|
|
||||||
│ │ ├── pytorch_model-00001-of-00007.bin
|
|
||||||
│ │ ├── pytorch_model-00002-of-00007.bin
|
|
||||||
│ │ ├── pytorch_model-00003-of-00007.bin
|
|
||||||
│ │ ├── pytorch_model-00004-of-00007.bin
|
|
||||||
│ │ ├── pytorch_model-00005-of-00007.bin
|
|
||||||
│ │ ├── pytorch_model-00006-of-00007.bin
|
|
||||||
│ │ ├── pytorch_model-00007-of-00007.bin
|
|
||||||
│ │ ├── pytorch_model.bin.index.json
|
|
||||||
│ │ ├── special_tokens_map.json
|
|
||||||
│ │ ├── tokenizer_config.json
|
|
||||||
│ │ └── tokenizer.model
|
|
||||||
```
|
|
||||||
|
|
||||||
* GGUF models are a single file and should be placed directly into `models`. Example:
|
|
||||||
|
|
||||||
```
|
|
||||||
text-generation-webui
|
|
||||||
├── models
|
|
||||||
│ ├── llama-2-13b-chat.Q4_K_M.gguf
|
|
||||||
```
|
|
||||||
|
|
||||||
In both cases, you can use the "Model" tab of the UI to download the model from Hugging Face automatically. It is also possible to download via the command-line with `python download-model.py organization/model` (use `--help` to see all the options).
|
|
||||||
|
|
||||||
#### GPT-4chan
|
|
||||||
|
|
||||||
<details>
|
<details>
|
||||||
<summary>
|
<summary>
|
||||||
Instructions
|
List of command-line flags
|
||||||
</summary>
|
</summary>
|
||||||
|
|
||||||
[GPT-4chan](https://huggingface.co/ykilcher/gpt-4chan) has been shut down from Hugging Face, so you need to download it elsewhere. You have two options:
|
```txt
|
||||||
|
usage: server.py [-h] [--user-data-dir USER_DATA_DIR] [--multi-user] [--model MODEL] [--lora LORA [LORA ...]] [--model-dir MODEL_DIR] [--lora-dir LORA_DIR] [--model-menu] [--settings SETTINGS]
|
||||||
|
[--extensions EXTENSIONS [EXTENSIONS ...]] [--verbose] [--idle-timeout IDLE_TIMEOUT] [--image-model IMAGE_MODEL] [--image-model-dir IMAGE_MODEL_DIR] [--image-dtype {bfloat16,float16}]
|
||||||
|
[--image-attn-backend {flash_attention_2,sdpa}] [--image-cpu-offload] [--image-compile] [--image-quant {none,bnb-8bit,bnb-4bit,torchao-int8wo,torchao-fp4,torchao-float8wo}]
|
||||||
|
[--loader LOADER] [--ctx-size N] [--cache-type N] [--model-draft MODEL_DRAFT] [--draft-max DRAFT_MAX] [--gpu-layers-draft GPU_LAYERS_DRAFT] [--device-draft DEVICE_DRAFT]
|
||||||
|
[--ctx-size-draft CTX_SIZE_DRAFT] [--spec-type {none,ngram-mod,ngram-simple,ngram-map-k,ngram-map-k4v,ngram-cache}] [--spec-ngram-size-n SPEC_NGRAM_SIZE_N]
|
||||||
|
[--spec-ngram-size-m SPEC_NGRAM_SIZE_M] [--spec-ngram-min-hits SPEC_NGRAM_MIN_HITS] [--gpu-layers N] [--cpu-moe] [--mmproj MMPROJ] [--streaming-llm] [--tensor-split TENSOR_SPLIT]
|
||||||
|
[--row-split] [--no-mmap] [--mlock] [--no-kv-offload] [--batch-size BATCH_SIZE] [--ubatch-size UBATCH_SIZE] [--threads THREADS] [--threads-batch THREADS_BATCH] [--numa]
|
||||||
|
[--parallel PARALLEL] [--fit-target FIT_TARGET] [--extra-flags EXTRA_FLAGS] [--cpu] [--cpu-memory CPU_MEMORY] [--disk] [--disk-cache-dir DISK_CACHE_DIR] [--load-in-8bit] [--bf16]
|
||||||
|
[--no-cache] [--trust-remote-code] [--force-safetensors] [--no_use_fast] [--attn-implementation IMPLEMENTATION] [--load-in-4bit] [--use_double_quant] [--compute_dtype COMPUTE_DTYPE]
|
||||||
|
[--quant_type QUANT_TYPE] [--gpu-split GPU_SPLIT] [--enable-tp] [--tp-backend TP_BACKEND] [--cfg-cache] [--listen] [--listen-port LISTEN_PORT] [--listen-host LISTEN_HOST] [--share]
|
||||||
|
[--auto-launch] [--gradio-auth GRADIO_AUTH] [--gradio-auth-path GRADIO_AUTH_PATH] [--ssl-keyfile SSL_KEYFILE] [--ssl-certfile SSL_CERTFILE] [--subpath SUBPATH] [--old-colors]
|
||||||
|
[--portable] [--api] [--public-api] [--public-api-id PUBLIC_API_ID] [--api-port API_PORT] [--api-key API_KEY] [--admin-key ADMIN_KEY] [--api-enable-ipv6] [--api-disable-ipv4]
|
||||||
|
[--nowebui] [--temperature N] [--dynatemp-low N] [--dynatemp-high N] [--dynatemp-exponent N] [--smoothing-factor N] [--smoothing-curve N] [--min-p N] [--top-p N] [--top-k N]
|
||||||
|
[--typical-p N] [--xtc-threshold N] [--xtc-probability N] [--epsilon-cutoff N] [--eta-cutoff N] [--tfs N] [--top-a N] [--top-n-sigma N] [--adaptive-target N] [--adaptive-decay N]
|
||||||
|
[--dry-multiplier N] [--dry-allowed-length N] [--dry-base N] [--repetition-penalty N] [--frequency-penalty N] [--presence-penalty N] [--encoder-repetition-penalty N]
|
||||||
|
[--no-repeat-ngram-size N] [--repetition-penalty-range N] [--penalty-alpha N] [--guidance-scale N] [--mirostat-mode N] [--mirostat-tau N] [--mirostat-eta N]
|
||||||
|
[--do-sample | --no-do-sample] [--dynamic-temperature | --no-dynamic-temperature] [--temperature-last | --no-temperature-last] [--sampler-priority N] [--dry-sequence-breakers N]
|
||||||
|
[--enable-thinking | --no-enable-thinking] [--reasoning-effort N] [--chat-template-file CHAT_TEMPLATE_FILE]
|
||||||
|
|
||||||
* Torrent: [16-bit](https://archive.org/details/gpt4chan_model_float16) / [32-bit](https://archive.org/details/gpt4chan_model)
|
Text Generation Web UI
|
||||||
* Direct download: [16-bit](https://theswissbay.ch/pdf/_notpdf_/gpt4chan_model_float16/) / [32-bit](https://theswissbay.ch/pdf/_notpdf_/gpt4chan_model/)
|
|
||||||
|
|
||||||
The 32-bit version is only relevant if you intend to run the model in CPU mode. Otherwise, you should use the 16-bit version.
|
options:
|
||||||
|
-h, --help show this help message and exit
|
||||||
|
|
||||||
After downloading the model, follow these steps:
|
Basic settings:
|
||||||
|
--user-data-dir USER_DATA_DIR Path to the user data directory. Default: auto-detected.
|
||||||
|
--multi-user Multi-user mode. Chat histories are not saved or automatically loaded. Best suited for small trusted teams.
|
||||||
|
--model MODEL Name of the model to load by default.
|
||||||
|
--lora LORA [LORA ...] The list of LoRAs to load. If you want to load more than one LoRA, write the names separated by spaces.
|
||||||
|
--model-dir MODEL_DIR Path to directory with all the models.
|
||||||
|
--lora-dir LORA_DIR Path to directory with all the loras.
|
||||||
|
--model-menu Show a model menu in the terminal when the web UI is first launched.
|
||||||
|
--settings SETTINGS Load the default interface settings from this yaml file. See user_data/settings-template.yaml for an example. If you create a file called
|
||||||
|
user_data/settings.yaml, this file will be loaded by default without the need to use the --settings flag.
|
||||||
|
--extensions EXTENSIONS [EXTENSIONS ...] The list of extensions to load. If you want to load more than one extension, write the names separated by spaces.
|
||||||
|
--verbose Print the prompts to the terminal.
|
||||||
|
--idle-timeout IDLE_TIMEOUT Unload model after this many minutes of inactivity. It will be automatically reloaded when you try to use it again.
|
||||||
|
|
||||||
1. Place the files under `models/gpt4chan_model_float16` or `models/gpt4chan_model`.
|
Image model:
|
||||||
2. Place GPT-J 6B's config.json file in that same folder: [config.json](https://huggingface.co/EleutherAI/gpt-j-6B/raw/main/config.json).
|
--image-model IMAGE_MODEL Name of the image model to select on startup (overrides saved setting).
|
||||||
3. Download GPT-J 6B's tokenizer files (they will be automatically detected when you attempt to load GPT-4chan):
|
--image-model-dir IMAGE_MODEL_DIR Path to directory with all the image models.
|
||||||
|
--image-dtype {bfloat16,float16} Data type for image model.
|
||||||
|
--image-attn-backend {flash_attention_2,sdpa} Attention backend for image model.
|
||||||
|
--image-cpu-offload Enable CPU offloading for image model.
|
||||||
|
--image-compile Compile the image model for faster inference.
|
||||||
|
--image-quant {none,bnb-8bit,bnb-4bit,torchao-int8wo,torchao-fp4,torchao-float8wo}
|
||||||
|
Quantization method for image model.
|
||||||
|
|
||||||
|
Model loader:
|
||||||
|
--loader LOADER Choose the model loader manually, otherwise, it will get autodetected. Valid options: Transformers, llama.cpp, ExLlamav3_HF, ExLlamav3, TensorRT-
|
||||||
|
LLM.
|
||||||
|
|
||||||
|
Context and cache:
|
||||||
|
--ctx-size, --n_ctx, --max_seq_len N Context size in tokens. 0 = auto for llama.cpp (requires gpu-layers=-1), 8192 for other loaders.
|
||||||
|
--cache-type, --cache_type N KV cache type; valid options: llama.cpp - fp16, q8_0, q4_0; ExLlamaV3 - fp16, q2 to q8 (can specify k_bits and v_bits separately, e.g. q4_q8).
|
||||||
|
|
||||||
|
Speculative decoding:
|
||||||
|
--model-draft MODEL_DRAFT Path to the draft model for speculative decoding.
|
||||||
|
--draft-max DRAFT_MAX Number of tokens to draft for speculative decoding.
|
||||||
|
--gpu-layers-draft GPU_LAYERS_DRAFT Number of layers to offload to the GPU for the draft model.
|
||||||
|
--device-draft DEVICE_DRAFT Comma-separated list of devices to use for offloading the draft model. Example: CUDA0,CUDA1
|
||||||
|
--ctx-size-draft CTX_SIZE_DRAFT Size of the prompt context for the draft model. If 0, uses the same as the main model.
|
||||||
|
--spec-type {none,ngram-mod,ngram-simple,ngram-map-k,ngram-map-k4v,ngram-cache}
|
||||||
|
Draftless speculative decoding type. Recommended: ngram-mod.
|
||||||
|
--spec-ngram-size-n SPEC_NGRAM_SIZE_N N-gram lookup size for ngram speculative decoding.
|
||||||
|
--spec-ngram-size-m SPEC_NGRAM_SIZE_M Draft n-gram size for ngram speculative decoding.
|
||||||
|
--spec-ngram-min-hits SPEC_NGRAM_MIN_HITS Minimum n-gram hits for ngram-map speculative decoding.
|
||||||
|
|
||||||
|
llama.cpp:
|
||||||
|
--gpu-layers, --n-gpu-layers N Number of layers to offload to the GPU. -1 = auto.
|
||||||
|
--cpu-moe Move the experts to the CPU (for MoE models).
|
||||||
|
--mmproj MMPROJ Path to the mmproj file for vision models.
|
||||||
|
--streaming-llm Activate StreamingLLM to avoid re-evaluating the entire prompt when old messages are removed.
|
||||||
|
--tensor-split TENSOR_SPLIT Split the model across multiple GPUs. Comma-separated list of proportions. Example: 60,40.
|
||||||
|
--row-split Split the model by rows across GPUs. This may improve multi-gpu performance.
|
||||||
|
--no-mmap Prevent mmap from being used.
|
||||||
|
--mlock Force the system to keep the model in RAM.
|
||||||
|
--no-kv-offload Do not offload the K, Q, V to the GPU. This saves VRAM but reduces the performance.
|
||||||
|
--batch-size BATCH_SIZE Maximum number of prompt tokens to batch together when calling llama-server. This is the application level batch size.
|
||||||
|
--ubatch-size UBATCH_SIZE Maximum number of prompt tokens to batch together when calling llama-server. This is the max physical batch size for computation (device level).
|
||||||
|
--threads THREADS Number of threads to use.
|
||||||
|
--threads-batch THREADS_BATCH Number of threads to use for batches/prompt processing.
|
||||||
|
--numa Activate NUMA task allocation for llama.cpp.
|
||||||
|
--parallel PARALLEL Number of parallel request slots. The context size is divided equally among slots. For example, to have 4 slots with 8192 context each, set
|
||||||
|
ctx_size to 32768.
|
||||||
|
--fit-target FIT_TARGET Target VRAM margin per device for auto GPU layers, comma-separated list of values in MiB. A single value is broadcast across all devices.
|
||||||
|
Default: 1024.
|
||||||
|
--extra-flags EXTRA_FLAGS Extra flags to pass to llama-server. Format: "flag1=value1,flag2,flag3=value3". Example: "override-tensor=exps=CPU"
|
||||||
|
|
||||||
|
Transformers/Accelerate:
|
||||||
|
--cpu Use the CPU to generate text. Warning: Training on CPU is extremely slow.
|
||||||
|
--cpu-memory CPU_MEMORY Maximum CPU memory in GiB. Use this for CPU offloading.
|
||||||
|
--disk If the model is too large for your GPU(s) and CPU combined, send the remaining layers to the disk.
|
||||||
|
--disk-cache-dir DISK_CACHE_DIR Directory to save the disk cache to.
|
||||||
|
--load-in-8bit Load the model with 8-bit precision (using bitsandbytes).
|
||||||
|
--bf16 Load the model with bfloat16 precision. Requires NVIDIA Ampere GPU.
|
||||||
|
--no-cache Set use_cache to False while generating text. This reduces VRAM usage slightly, but it comes at a performance cost.
|
||||||
|
--trust-remote-code Set trust_remote_code=True while loading the model. Necessary for some models.
|
||||||
|
--force-safetensors Set use_safetensors=True while loading the model. This prevents arbitrary code execution.
|
||||||
|
--no_use_fast Set use_fast=False while loading the tokenizer (it's True by default). Use this if you have any problems related to use_fast.
|
||||||
|
--attn-implementation IMPLEMENTATION Attention implementation. Valid options: sdpa, eager, flash_attention_2.
|
||||||
|
|
||||||
|
bitsandbytes 4-bit:
|
||||||
|
--load-in-4bit Load the model with 4-bit precision (using bitsandbytes).
|
||||||
|
--use_double_quant use_double_quant for 4-bit.
|
||||||
|
--compute_dtype COMPUTE_DTYPE compute dtype for 4-bit. Valid options: bfloat16, float16, float32.
|
||||||
|
--quant_type QUANT_TYPE quant_type for 4-bit. Valid options: nf4, fp4.
|
||||||
|
|
||||||
|
ExLlamaV3:
|
||||||
|
--gpu-split GPU_SPLIT Comma-separated list of VRAM (in GB) to use per GPU device for model layers. Example: 20,7,7.
|
||||||
|
--enable-tp, --enable_tp Enable Tensor Parallelism (TP) to split the model across GPUs.
|
||||||
|
--tp-backend TP_BACKEND The backend for tensor parallelism. Valid options: native, nccl. Default: native.
|
||||||
|
--cfg-cache Create an additional cache for CFG negative prompts. Necessary to use CFG with that loader.
|
||||||
|
|
||||||
|
Gradio:
|
||||||
|
--listen Make the web UI reachable from your local network.
|
||||||
|
--listen-port LISTEN_PORT The listening port that the server will use.
|
||||||
|
--listen-host LISTEN_HOST The hostname that the server will use.
|
||||||
|
--share Create a public URL. This is useful for running the web UI on Google Colab or similar.
|
||||||
|
--auto-launch Open the web UI in the default browser upon launch.
|
||||||
|
--gradio-auth GRADIO_AUTH Set Gradio authentication password in the format "username:password". Multiple credentials can also be supplied with "u1:p1,u2:p2,u3:p3".
|
||||||
|
--gradio-auth-path GRADIO_AUTH_PATH Set the Gradio authentication file path. The file should contain one or more user:password pairs in the same format as above.
|
||||||
|
--ssl-keyfile SSL_KEYFILE The path to the SSL certificate key file.
|
||||||
|
--ssl-certfile SSL_CERTFILE The path to the SSL certificate cert file.
|
||||||
|
--subpath SUBPATH Customize the subpath for gradio, use with reverse proxy
|
||||||
|
--old-colors Use the legacy Gradio colors, before the December/2024 update.
|
||||||
|
--portable Hide features not available in portable mode like training.
|
||||||
|
|
||||||
|
API:
|
||||||
|
--api Enable the API extension.
|
||||||
|
--public-api Create a public URL for the API using Cloudflare.
|
||||||
|
--public-api-id PUBLIC_API_ID Tunnel ID for named Cloudflare Tunnel. Use together with public-api option.
|
||||||
|
--api-port API_PORT The listening port for the API.
|
||||||
|
--api-key API_KEY API authentication key.
|
||||||
|
--admin-key ADMIN_KEY API authentication key for admin tasks like loading and unloading models. If not set, will be the same as --api-key.
|
||||||
|
--api-enable-ipv6 Enable IPv6 for the API
|
||||||
|
--api-disable-ipv4 Disable IPv4 for the API
|
||||||
|
--nowebui Do not launch the Gradio UI. Useful for launching the API in standalone mode.
|
||||||
|
|
||||||
|
API generation defaults:
|
||||||
|
--temperature N Temperature
|
||||||
|
--dynatemp-low N Dynamic temperature low
|
||||||
|
--dynatemp-high N Dynamic temperature high
|
||||||
|
--dynatemp-exponent N Dynamic temperature exponent
|
||||||
|
--smoothing-factor N Smoothing factor
|
||||||
|
--smoothing-curve N Smoothing curve
|
||||||
|
--min-p N Min P
|
||||||
|
--top-p N Top P
|
||||||
|
--top-k N Top K
|
||||||
|
--typical-p N Typical P
|
||||||
|
--xtc-threshold N XTC threshold
|
||||||
|
--xtc-probability N XTC probability
|
||||||
|
--epsilon-cutoff N Epsilon cutoff
|
||||||
|
--eta-cutoff N Eta cutoff
|
||||||
|
--tfs N TFS
|
||||||
|
--top-a N Top A
|
||||||
|
--top-n-sigma N Top N Sigma
|
||||||
|
--adaptive-target N Adaptive target
|
||||||
|
--adaptive-decay N Adaptive decay
|
||||||
|
--dry-multiplier N DRY multiplier
|
||||||
|
--dry-allowed-length N DRY allowed length
|
||||||
|
--dry-base N DRY base
|
||||||
|
--repetition-penalty N Repetition penalty
|
||||||
|
--frequency-penalty N Frequency penalty
|
||||||
|
--presence-penalty N Presence penalty
|
||||||
|
--encoder-repetition-penalty N Encoder repetition penalty
|
||||||
|
--no-repeat-ngram-size N No repeat ngram size
|
||||||
|
--repetition-penalty-range N Repetition penalty range
|
||||||
|
--penalty-alpha N Penalty alpha
|
||||||
|
--guidance-scale N Guidance scale
|
||||||
|
--mirostat-mode N Mirostat mode
|
||||||
|
--mirostat-tau N Mirostat tau
|
||||||
|
--mirostat-eta N Mirostat eta
|
||||||
|
--do-sample, --no-do-sample Do sample
|
||||||
|
--dynamic-temperature, --no-dynamic-temperature Dynamic temperature
|
||||||
|
--temperature-last, --no-temperature-last Temperature last
|
||||||
|
--sampler-priority N Sampler priority
|
||||||
|
--dry-sequence-breakers N DRY sequence breakers
|
||||||
|
--enable-thinking, --no-enable-thinking Enable thinking
|
||||||
|
--reasoning-effort N Reasoning effort
|
||||||
|
--chat-template-file CHAT_TEMPLATE_FILE Path to a chat template file (.jinja, .jinja2, or .yaml) to use as the default instruction template for API requests. Overrides the model's
|
||||||
|
built-in template.
|
||||||
```
|
```
|
||||||
python download-model.py EleutherAI/gpt-j-6B --text-only
|
|
||||||
```
|
|
||||||
|
|
||||||
When you load this model in default or notebook modes, the "HTML" tab will show the generated text in 4chan format:
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
## Starting the web UI
|
## Downloading models
|
||||||
|
|
||||||
conda activate textgen
|
1. Download a GGUF model file from [Hugging Face](https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads&search=gguf).
|
||||||
cd text-generation-webui
|
2. Place it in the `user_data/models` folder.
|
||||||
python server.py
|
|
||||||
|
|
||||||
Then browse to
|
That's it. The UI will detect it automatically.
|
||||||
|
|
||||||
`http://localhost:7860/?__theme=dark`
|
To check what will fit your GPU, you can use the [VRAM Calculator](https://huggingface.co/spaces/oobabooga/accurate-gguf-vram-calculator).
|
||||||
|
|
||||||
Optionally, you can use the following command-line flags:
|
<details>
|
||||||
|
<summary>Other model types (Transformers, EXL3)</summary>
|
||||||
|
|
||||||
#### Basic settings
|
Models that consist of multiple files (like 16-bit Transformers models and EXL3 models) should be placed in a subfolder inside `user_data/models`:
|
||||||
|
|
||||||
| Flag | Description |
|
```
|
||||||
|--------------------------------------------|-------------|
|
text-generation-webui
|
||||||
| `-h`, `--help` | Show this help message and exit. |
|
└── user_data
|
||||||
| `--multi-user` | Multi-user mode. Chat histories are not saved or automatically loaded. WARNING: this is highly experimental. |
|
└── models
|
||||||
| `--character CHARACTER` | The name of the character to load in chat mode by default. |
|
└── Qwen_Qwen3-8B
|
||||||
| `--model MODEL` | Name of the model to load by default. |
|
├── config.json
|
||||||
| `--lora LORA [LORA ...]` | The list of LoRAs to load. If you want to load more than one LoRA, write the names separated by spaces. |
|
├── generation_config.json
|
||||||
| `--model-dir MODEL_DIR` | Path to directory with all the models. |
|
├── model-00001-of-00004.safetensors
|
||||||
| `--lora-dir LORA_DIR` | Path to directory with all the loras. |
|
├── ...
|
||||||
| `--model-menu` | Show a model menu in the terminal when the web UI is first launched. |
|
├── tokenizer_config.json
|
||||||
| `--settings SETTINGS_FILE` | Load the default interface settings from this yaml file. See `settings-template.yaml` for an example. If you create a file called `settings.yaml`, this file will be loaded by default without the need to use the `--settings` flag. |
|
└── tokenizer.json
|
||||||
| `--extensions EXTENSIONS [EXTENSIONS ...]` | The list of extensions to load. If you want to load more than one extension, write the names separated by spaces. |
|
```
|
||||||
| `--verbose` | Print the prompts to the terminal. |
|
|
||||||
| `--chat-buttons` | Show buttons on chat tab instead of hover menu. |
|
|
||||||
|
|
||||||
#### Model loader
|
These formats require the one-click installer (not the portable build).
|
||||||
|
</details>
|
||||||
|
|
||||||
| Flag | Description |
|
## Documentation
|
||||||
|--------------------------------------------|-------------|
|
|
||||||
| `--loader LOADER` | Choose the model loader manually, otherwise, it will get autodetected. Valid options: transformers, autogptq, gptq-for-llama, exllama, exllama_hf, llamacpp, rwkv, ctransformers |
|
|
||||||
|
|
||||||
#### Accelerate/transformers
|
https://github.com/oobabooga/text-generation-webui/wiki
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------------------------|-------------|
|
|
||||||
| `--cpu` | Use the CPU to generate text. Warning: Training on CPU is extremely slow.|
|
|
||||||
| `--auto-devices` | Automatically split the model across the available GPU(s) and CPU. |
|
|
||||||
| `--gpu-memory GPU_MEMORY [GPU_MEMORY ...]` | Maximum GPU memory in GiB to be allocated per GPU. Example: `--gpu-memory 10` for a single GPU, `--gpu-memory 10 5` for two GPUs. You can also set values in MiB like `--gpu-memory 3500MiB`. |
|
|
||||||
| `--cpu-memory CPU_MEMORY` | Maximum CPU memory in GiB to allocate for offloaded weights. Same as above.|
|
|
||||||
| `--disk` | If the model is too large for your GPU(s) and CPU combined, send the remaining layers to the disk. |
|
|
||||||
| `--disk-cache-dir DISK_CACHE_DIR` | Directory to save the disk cache to. Defaults to `cache/`. |
|
|
||||||
| `--load-in-8bit` | Load the model with 8-bit precision (using bitsandbytes).|
|
|
||||||
| `--bf16` | Load the model with bfloat16 precision. Requires NVIDIA Ampere GPU. |
|
|
||||||
| `--no-cache` | Set `use_cache` to False while generating text. This reduces the VRAM usage a bit with a performance cost. |
|
|
||||||
| `--xformers` | Use xformer's memory efficient attention. This should increase your tokens/s. |
|
|
||||||
| `--sdp-attention` | Use torch 2.0's sdp attention. |
|
|
||||||
| `--trust-remote-code` | Set trust_remote_code=True while loading a model. Necessary for ChatGLM and Falcon. |
|
|
||||||
| `--use_fast` | Set use_fast=True while loading a tokenizer. |
|
|
||||||
|
|
||||||
#### Accelerate 4-bit
|
|
||||||
|
|
||||||
⚠️ Requires minimum compute of 7.0 on Windows at the moment.
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------------------------|-------------|
|
|
||||||
| `--load-in-4bit` | Load the model with 4-bit precision (using bitsandbytes). |
|
|
||||||
| `--compute_dtype COMPUTE_DTYPE` | compute dtype for 4-bit. Valid options: bfloat16, float16, float32. |
|
|
||||||
| `--quant_type QUANT_TYPE` | quant_type for 4-bit. Valid options: nf4, fp4. |
|
|
||||||
| `--use_double_quant` | use_double_quant for 4-bit. |
|
|
||||||
|
|
||||||
#### GGUF (for llama.cpp and ctransformers)
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|-------------|-------------|
|
|
||||||
| `--threads` | Number of threads to use. |
|
|
||||||
| `--threads-batch THREADS_BATCH` | Number of threads to use for batches/prompt processing. |
|
|
||||||
| `--n_batch` | Maximum number of prompt tokens to batch together when calling llama_eval. |
|
|
||||||
| `--n-gpu-layers N_GPU_LAYERS` | Number of layers to offload to the GPU. Only works if llama-cpp-python was compiled with BLAS. Set this to 1000000000 to offload all layers to the GPU. |
|
|
||||||
| `--n_ctx N_CTX` | Size of the prompt context. |
|
|
||||||
|
|
||||||
#### llama.cpp
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------|---------------|
|
|
||||||
| `--mul_mat_q` | Activate new mulmat kernels. |
|
|
||||||
| `--tensor_split TENSOR_SPLIT` | Split the model across multiple GPUs, comma-separated list of proportions, e.g. 18,17 |
|
|
||||||
| `--llama_cpp_seed SEED` | Seed for llama-cpp models. Default 0 (random). |
|
|
||||||
| `--cache-capacity CACHE_CAPACITY` | Maximum cache capacity. Examples: 2000MiB, 2GiB. When provided without units, bytes will be assumed. |
|
|
||||||
|`--cfg-cache` | llamacpp_HF: Create an additional cache for CFG negative prompts. |
|
|
||||||
| `--no-mmap` | Prevent mmap from being used. |
|
|
||||||
| `--mlock` | Force the system to keep the model in RAM. |
|
|
||||||
| `--numa` | Activate NUMA task allocation for llama.cpp |
|
|
||||||
| `--cpu` | Use the CPU version of llama-cpp-python instead of the GPU-accelerated version. |
|
|
||||||
|
|
||||||
#### ctransformers
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|-------------|-------------|
|
|
||||||
| `--model_type MODEL_TYPE` | Model type of pre-quantized model. Currently gpt2, gptj, gptneox, falcon, llama, mpt, starcoder (gptbigcode), dollyv2, and replit are supported. |
|
|
||||||
|
|
||||||
#### AutoGPTQ
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|------------------|-------------|
|
|
||||||
| `--triton` | Use triton. |
|
|
||||||
| `--no_inject_fused_attention` | Disable the use of fused attention, which will use less VRAM at the cost of slower inference. |
|
|
||||||
| `--no_inject_fused_mlp` | Triton mode only: disable the use of fused MLP, which will use less VRAM at the cost of slower inference. |
|
|
||||||
| `--no_use_cuda_fp16` | This can make models faster on some systems. |
|
|
||||||
| `--desc_act` | For models that don't have a quantize_config.json, this parameter is used to define whether to set desc_act or not in BaseQuantizeConfig. |
|
|
||||||
| `--disable_exllama` | Disable ExLlama kernel, which can improve inference speed on some systems. |
|
|
||||||
|
|
||||||
#### ExLlama
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|------------------|-------------|
|
|
||||||
|`--gpu-split` | Comma-separated list of VRAM (in GB) to use per GPU device for model layers, e.g. `20,7,7` |
|
|
||||||
|`--max_seq_len MAX_SEQ_LEN` | Maximum sequence length. |
|
|
||||||
|`--cfg-cache` | ExLlama_HF: Create an additional cache for CFG negative prompts. Necessary to use CFG with that loader, but not necessary for CFG with base ExLlama. |
|
|
||||||
|
|
||||||
#### GPTQ-for-LLaMa
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------|-------------|
|
|
||||||
| `--wbits WBITS` | Load a pre-quantized model with specified precision in bits. 2, 3, 4 and 8 are supported. |
|
|
||||||
| `--model_type MODEL_TYPE` | Model type of pre-quantized model. Currently LLaMA, OPT, and GPT-J are supported. |
|
|
||||||
| `--groupsize GROUPSIZE` | Group size. |
|
|
||||||
| `--pre_layer PRE_LAYER [PRE_LAYER ...]`  | The number of layers to allocate to the GPU. Setting this parameter enables CPU offloading for 4-bit models. For multi-gpu, write the numbers separated by spaces, e.g. `--pre_layer 30 60`. |
|
|
||||||
| `--checkpoint CHECKPOINT` | The path to the quantized checkpoint file. If not specified, it will be automatically detected. |
|
|
||||||
| `--monkey-patch` | Apply the monkey patch for using LoRAs with quantized models.
|
|
||||||
|
|
||||||
#### DeepSpeed
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------------------|-------------|
|
|
||||||
| `--deepspeed` | Enable the use of DeepSpeed ZeRO-3 for inference via the Transformers integration. |
|
|
||||||
| `--nvme-offload-dir NVME_OFFLOAD_DIR` | DeepSpeed: Directory to use for ZeRO-3 NVME offloading. |
|
|
||||||
| `--local_rank LOCAL_RANK` | DeepSpeed: Optional argument for distributed setups. |
|
|
||||||
|
|
||||||
#### RWKV
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------------|-------------|
|
|
||||||
| `--rwkv-strategy RWKV_STRATEGY` | RWKV: The strategy to use while loading the model. Examples: "cpu fp32", "cuda fp16", "cuda fp16i8". |
|
|
||||||
| `--rwkv-cuda-on` | RWKV: Compile the CUDA kernel for better performance. |
|
|
||||||
|
|
||||||
#### RoPE (for llama.cpp, ExLlama, ExLlamaV2, and transformers)
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|------------------|-------------|
|
|
||||||
| `--alpha_value ALPHA_VALUE` | Positional embeddings alpha factor for NTK RoPE scaling. Use either this or compress_pos_emb, not both. |
|
|
||||||
| `--rope_freq_base ROPE_FREQ_BASE` | If greater than 0, will be used instead of alpha_value. Those two are related by rope_freq_base = 10000 * alpha_value ^ (64 / 63). |
|
|
||||||
| `--compress_pos_emb COMPRESS_POS_EMB` | Positional embeddings compression factor. Should be set to (context length) / (model's original context length). Equal to 1/rope_freq_scale. |
|
|
||||||
|
|
||||||
#### Gradio
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------------------|-------------|
|
|
||||||
| `--listen` | Make the web UI reachable from your local network. |
|
|
||||||
| `--listen-host LISTEN_HOST` | The hostname that the server will use. |
|
|
||||||
| `--listen-port LISTEN_PORT` | The listening port that the server will use. |
|
|
||||||
| `--share` | Create a public URL. This is useful for running the web UI on Google Colab or similar. |
|
|
||||||
| `--auto-launch` | Open the web UI in the default browser upon launch. |
|
|
||||||
| `--gradio-auth USER:PWD` | set gradio authentication like "username:password"; or comma-delimit multiple like "u1:p1,u2:p2,u3:p3" |
|
|
||||||
| `--gradio-auth-path GRADIO_AUTH_PATH` | Set the gradio authentication file path. The file should contain one or more user:password pairs in this format: "u1:p1,u2:p2,u3:p3" |
|
|
||||||
| `--ssl-keyfile SSL_KEYFILE` | The path to the SSL certificate key file. |
|
|
||||||
| `--ssl-certfile SSL_CERTFILE` | The path to the SSL certificate cert file. |
|
|
||||||
|
|
||||||
#### API
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------------------|-------------|
|
|
||||||
| `--api` | Enable the API extension. |
|
|
||||||
| `--public-api`                         | Create a public URL for the API using Cloudflare. |
|
|
||||||
| `--public-api-id PUBLIC_API_ID` | Tunnel ID for named Cloudflare Tunnel. Use together with public-api option. |
|
|
||||||
| `--api-blocking-port BLOCKING_PORT` | The listening port for the blocking API. |
|
|
||||||
| `--api-streaming-port STREAMING_PORT` | The listening port for the streaming API. |
|
|
||||||
|
|
||||||
#### Multimodal
|
|
||||||
|
|
||||||
| Flag | Description |
|
|
||||||
|---------------------------------------|-------------|
|
|
||||||
| `--multimodal-pipeline PIPELINE` | The multimodal pipeline to use. Examples: `llava-7b`, `llava-13b`. |
|
|
||||||
|
|
||||||
## Presets
|
|
||||||
|
|
||||||
Inference settings presets can be created under `presets/` as yaml files. These files are detected automatically at startup.
|
|
||||||
|
|
||||||
The presets that are included by default are the result of a contest that received 7215 votes. More details can be found [here](https://github.com/oobabooga/oobabooga.github.io/blob/main/arena/results.md).
|
|
||||||
|
|
||||||
## Contributing
|
|
||||||
|
|
||||||
If you would like to contribute to the project, check out the [Contributing guidelines](https://github.com/oobabooga/text-generation-webui/wiki/Contributing-guidelines).
|
|
||||||
|
|
||||||
## Community
|
## Community
|
||||||
|
|
||||||
* Subreddit: https://www.reddit.com/r/oobabooga/
|
https://www.reddit.com/r/Oobabooga/
|
||||||
* Discord: https://discord.gg/jwZCF2dPQN
|
|
||||||
|
|
||||||
## Acknowledgment
|
## Acknowledgments
|
||||||
|
|
||||||
In August 2023, [Andreessen Horowitz](https://a16z.com/) (a16z) provided a generous grant to encourage and support my independent work on this project. I am **extremely** grateful for their trust and recognition, which will allow me to dedicate more time towards realizing the full potential of text-generation-webui.
|
- In August 2023, [Andreessen Horowitz](https://a16z.com/) (a16z) provided a generous grant to encourage and support my independent work on this project. I am **extremely** grateful for their trust and recognition.
|
||||||
|
- This project was inspired by [AUTOMATIC1111/stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui) and wouldn't exist without it.
|
||||||
|
|
|
||||||
|
|
@ -1,112 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import html
|
|
||||||
import json
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import websockets
|
|
||||||
except ImportError:
|
|
||||||
print("Websockets package not found. Make sure it's installed.")
|
|
||||||
|
|
||||||
# For local streaming, the websockets are hosted without ssl - ws://
|
|
||||||
HOST = 'localhost:5005'
|
|
||||||
URI = f'ws://{HOST}/api/v1/chat-stream'
|
|
||||||
|
|
||||||
# For reverse-proxied streaming, the remote will likely host with ssl - wss://
|
|
||||||
# URI = 'wss://your-uri-here.trycloudflare.com/api/v1/stream'
|
|
||||||
|
|
||||||
|
|
||||||
async def run(user_input, history):
|
|
||||||
# Note: the selected defaults change from time to time.
|
|
||||||
request = {
|
|
||||||
'user_input': user_input,
|
|
||||||
'max_new_tokens': 250,
|
|
||||||
'auto_max_new_tokens': False,
|
|
||||||
'max_tokens_second': 0,
|
|
||||||
'history': history,
|
|
||||||
'mode': 'instruct', # Valid options: 'chat', 'chat-instruct', 'instruct'
|
|
||||||
'character': 'Example',
|
|
||||||
'instruction_template': 'Vicuna-v1.1', # Will get autodetected if unset
|
|
||||||
'your_name': 'You',
|
|
||||||
# 'name1': 'name of user', # Optional
|
|
||||||
# 'name2': 'name of character', # Optional
|
|
||||||
# 'context': 'character context', # Optional
|
|
||||||
# 'greeting': 'greeting', # Optional
|
|
||||||
# 'name1_instruct': 'You', # Optional
|
|
||||||
# 'name2_instruct': 'Assistant', # Optional
|
|
||||||
# 'context_instruct': 'context_instruct', # Optional
|
|
||||||
# 'turn_template': 'turn_template', # Optional
|
|
||||||
'regenerate': False,
|
|
||||||
'_continue': False,
|
|
||||||
'chat_instruct_command': 'Continue the chat dialogue below. Write a single reply for the character "<|character|>".\n\n<|prompt|>',
|
|
||||||
|
|
||||||
# Generation params. If 'preset' is set to different than 'None', the values
|
|
||||||
# in presets/preset-name.yaml are used instead of the individual numbers.
|
|
||||||
'preset': 'None',
|
|
||||||
'do_sample': True,
|
|
||||||
'temperature': 0.7,
|
|
||||||
'top_p': 0.1,
|
|
||||||
'typical_p': 1,
|
|
||||||
'epsilon_cutoff': 0, # In units of 1e-4
|
|
||||||
'eta_cutoff': 0, # In units of 1e-4
|
|
||||||
'tfs': 1,
|
|
||||||
'top_a': 0,
|
|
||||||
'repetition_penalty': 1.18,
|
|
||||||
'repetition_penalty_range': 0,
|
|
||||||
'top_k': 40,
|
|
||||||
'min_length': 0,
|
|
||||||
'no_repeat_ngram_size': 0,
|
|
||||||
'num_beams': 1,
|
|
||||||
'penalty_alpha': 0,
|
|
||||||
'length_penalty': 1,
|
|
||||||
'early_stopping': False,
|
|
||||||
'mirostat_mode': 0,
|
|
||||||
'mirostat_tau': 5,
|
|
||||||
'mirostat_eta': 0.1,
|
|
||||||
'grammar_string': '',
|
|
||||||
'guidance_scale': 1,
|
|
||||||
'negative_prompt': '',
|
|
||||||
|
|
||||||
'seed': -1,
|
|
||||||
'add_bos_token': True,
|
|
||||||
'truncation_length': 2048,
|
|
||||||
'ban_eos_token': False,
|
|
||||||
'custom_token_bans': '',
|
|
||||||
'skip_special_tokens': True,
|
|
||||||
'stopping_strings': []
|
|
||||||
}
|
|
||||||
|
|
||||||
async with websockets.connect(URI, ping_interval=None) as websocket:
|
|
||||||
await websocket.send(json.dumps(request))
|
|
||||||
|
|
||||||
while True:
|
|
||||||
incoming_data = await websocket.recv()
|
|
||||||
incoming_data = json.loads(incoming_data)
|
|
||||||
|
|
||||||
match incoming_data['event']:
|
|
||||||
case 'text_stream':
|
|
||||||
yield incoming_data['history']
|
|
||||||
case 'stream_end':
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
async def print_response_stream(user_input, history):
|
|
||||||
cur_len = 0
|
|
||||||
async for new_history in run(user_input, history):
|
|
||||||
cur_message = new_history['visible'][-1][1][cur_len:]
|
|
||||||
cur_len += len(cur_message)
|
|
||||||
print(html.unescape(cur_message), end='')
|
|
||||||
sys.stdout.flush() # If we don't flush, we won't see tokens in realtime.
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
user_input = "Please give me a step-by-step guide on how to plant a tree in my backyard."
|
|
||||||
|
|
||||||
# Basic example
|
|
||||||
history = {'internal': [], 'visible': []}
|
|
||||||
|
|
||||||
# "Continue" example. Make sure to set '_continue' to True above
|
|
||||||
# arr = [user_input, 'Surely, here is']
|
|
||||||
# history = {'internal': [arr], 'visible': [arr]}
|
|
||||||
|
|
||||||
asyncio.run(print_response_stream(user_input, history))
|
|
||||||
|
|
@ -1,92 +0,0 @@
|
||||||
import html
|
|
||||||
import json
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
# For local streaming, the websockets are hosted without ssl - http://
|
|
||||||
HOST = 'localhost:5000'
|
|
||||||
URI = f'http://{HOST}/api/v1/chat'
|
|
||||||
|
|
||||||
# For reverse-proxied streaming, the remote will likely host with ssl - https://
|
|
||||||
# URI = 'https://your-uri-here.trycloudflare.com/api/v1/chat'
|
|
||||||
|
|
||||||
|
|
||||||
def run(user_input, history):
|
|
||||||
request = {
|
|
||||||
'user_input': user_input,
|
|
||||||
'max_new_tokens': 250,
|
|
||||||
'auto_max_new_tokens': False,
|
|
||||||
'max_tokens_second': 0,
|
|
||||||
'history': history,
|
|
||||||
'mode': 'instruct', # Valid options: 'chat', 'chat-instruct', 'instruct'
|
|
||||||
'character': 'Example',
|
|
||||||
'instruction_template': 'Vicuna-v1.1', # Will get autodetected if unset
|
|
||||||
'your_name': 'You',
|
|
||||||
# 'name1': 'name of user', # Optional
|
|
||||||
# 'name2': 'name of character', # Optional
|
|
||||||
# 'context': 'character context', # Optional
|
|
||||||
# 'greeting': 'greeting', # Optional
|
|
||||||
# 'name1_instruct': 'You', # Optional
|
|
||||||
# 'name2_instruct': 'Assistant', # Optional
|
|
||||||
# 'context_instruct': 'context_instruct', # Optional
|
|
||||||
# 'turn_template': 'turn_template', # Optional
|
|
||||||
'regenerate': False,
|
|
||||||
'_continue': False,
|
|
||||||
'chat_instruct_command': 'Continue the chat dialogue below. Write a single reply for the character "<|character|>".\n\n<|prompt|>',
|
|
||||||
|
|
||||||
# Generation params. If 'preset' is set to different than 'None', the values
|
|
||||||
# in presets/preset-name.yaml are used instead of the individual numbers.
|
|
||||||
'preset': 'None',
|
|
||||||
'do_sample': True,
|
|
||||||
'temperature': 0.7,
|
|
||||||
'top_p': 0.1,
|
|
||||||
'typical_p': 1,
|
|
||||||
'epsilon_cutoff': 0, # In units of 1e-4
|
|
||||||
'eta_cutoff': 0, # In units of 1e-4
|
|
||||||
'tfs': 1,
|
|
||||||
'top_a': 0,
|
|
||||||
'repetition_penalty': 1.18,
|
|
||||||
'repetition_penalty_range': 0,
|
|
||||||
'top_k': 40,
|
|
||||||
'min_length': 0,
|
|
||||||
'no_repeat_ngram_size': 0,
|
|
||||||
'num_beams': 1,
|
|
||||||
'penalty_alpha': 0,
|
|
||||||
'length_penalty': 1,
|
|
||||||
'early_stopping': False,
|
|
||||||
'mirostat_mode': 0,
|
|
||||||
'mirostat_tau': 5,
|
|
||||||
'mirostat_eta': 0.1,
|
|
||||||
'grammar_string': '',
|
|
||||||
'guidance_scale': 1,
|
|
||||||
'negative_prompt': '',
|
|
||||||
|
|
||||||
'seed': -1,
|
|
||||||
'add_bos_token': True,
|
|
||||||
'truncation_length': 2048,
|
|
||||||
'ban_eos_token': False,
|
|
||||||
'custom_token_bans': '',
|
|
||||||
'skip_special_tokens': True,
|
|
||||||
'stopping_strings': []
|
|
||||||
}
|
|
||||||
|
|
||||||
response = requests.post(URI, json=request)
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
result = response.json()['results'][0]['history']
|
|
||||||
print(json.dumps(result, indent=4))
|
|
||||||
print()
|
|
||||||
print(html.unescape(result['visible'][-1][1]))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
user_input = "Please give me a step-by-step guide on how to plant a tree in my backyard."
|
|
||||||
|
|
||||||
# Basic example
|
|
||||||
history = {'internal': [], 'visible': []}
|
|
||||||
|
|
||||||
# "Continue" example. Make sure to set '_continue' to True above
|
|
||||||
# arr = [user_input, 'Surely, here is']
|
|
||||||
# history = {'internal': [arr], 'visible': [arr]}
|
|
||||||
|
|
||||||
run(user_input, history)
|
|
||||||
|
|
@ -1,176 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
HOST = '0.0.0.0:5000'
|
|
||||||
|
|
||||||
|
|
||||||
def generate(prompt, tokens=200):
|
|
||||||
request = {'prompt': prompt, 'max_new_tokens': tokens}
|
|
||||||
response = requests.post(f'http://{HOST}/api/v1/generate', json=request)
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
return response.json()['results'][0]['text']
|
|
||||||
|
|
||||||
|
|
||||||
def model_api(request):
|
|
||||||
response = requests.post(f'http://{HOST}/api/v1/model', json=request)
|
|
||||||
return response.json()
|
|
||||||
|
|
||||||
|
|
||||||
# print some common settings
|
|
||||||
def print_basic_model_info(response):
|
|
||||||
basic_settings = ['truncation_length', 'instruction_template']
|
|
||||||
print("Model: ", response['result']['model_name'])
|
|
||||||
print("Lora(s): ", response['result']['lora_names'])
|
|
||||||
for setting in basic_settings:
|
|
||||||
print(setting, "=", response['result']['shared.settings'][setting])
|
|
||||||
|
|
||||||
|
|
||||||
# model info
|
|
||||||
def model_info():
|
|
||||||
response = model_api({'action': 'info'})
|
|
||||||
print_basic_model_info(response)
|
|
||||||
|
|
||||||
|
|
||||||
# simple loader
|
|
||||||
def model_load(model_name):
|
|
||||||
return model_api({'action': 'load', 'model_name': model_name})
|
|
||||||
|
|
||||||
|
|
||||||
# complex loader
|
|
||||||
def complex_model_load(model):
|
|
||||||
|
|
||||||
def guess_groupsize(model_name):
|
|
||||||
if '1024g' in model_name:
|
|
||||||
return 1024
|
|
||||||
elif '128g' in model_name:
|
|
||||||
return 128
|
|
||||||
elif '32g' in model_name:
|
|
||||||
return 32
|
|
||||||
else:
|
|
||||||
return -1
|
|
||||||
|
|
||||||
req = {
|
|
||||||
'action': 'load',
|
|
||||||
'model_name': model,
|
|
||||||
'args': {
|
|
||||||
'loader': 'AutoGPTQ',
|
|
||||||
|
|
||||||
'bf16': False,
|
|
||||||
'load_in_8bit': False,
|
|
||||||
'groupsize': 0,
|
|
||||||
'wbits': 0,
|
|
||||||
|
|
||||||
# llama.cpp
|
|
||||||
'threads': 0,
|
|
||||||
'n_batch': 512,
|
|
||||||
'no_mmap': False,
|
|
||||||
'mlock': False,
|
|
||||||
'cache_capacity': None,
|
|
||||||
'n_gpu_layers': 0,
|
|
||||||
'n_ctx': 2048,
|
|
||||||
|
|
||||||
# RWKV
|
|
||||||
'rwkv_strategy': None,
|
|
||||||
'rwkv_cuda_on': False,
|
|
||||||
|
|
||||||
# b&b 4-bit
|
|
||||||
# 'load_in_4bit': False,
|
|
||||||
# 'compute_dtype': 'float16',
|
|
||||||
# 'quant_type': 'nf4',
|
|
||||||
# 'use_double_quant': False,
|
|
||||||
|
|
||||||
# "cpu": false,
|
|
||||||
# "auto_devices": false,
|
|
||||||
# "gpu_memory": null,
|
|
||||||
# "cpu_memory": null,
|
|
||||||
# "disk": false,
|
|
||||||
# "disk_cache_dir": "cache",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
model = model.lower()
|
|
||||||
|
|
||||||
if '4bit' in model or 'gptq' in model or 'int4' in model:
|
|
||||||
req['args']['wbits'] = 4
|
|
||||||
req['args']['groupsize'] = guess_groupsize(model)
|
|
||||||
elif '3bit' in model:
|
|
||||||
req['args']['wbits'] = 3
|
|
||||||
req['args']['groupsize'] = guess_groupsize(model)
|
|
||||||
else:
|
|
||||||
req['args']['gptq_for_llama'] = False
|
|
||||||
|
|
||||||
if '8bit' in model:
|
|
||||||
req['args']['load_in_8bit'] = True
|
|
||||||
elif '-hf' in model or 'fp16' in model:
|
|
||||||
if '7b' in model:
|
|
||||||
req['args']['bf16'] = True # for 24GB
|
|
||||||
elif '13b' in model:
|
|
||||||
req['args']['load_in_8bit'] = True # for 24GB
|
|
||||||
elif 'gguf' in model:
|
|
||||||
# req['args']['threads'] = 16
|
|
||||||
if '7b' in model:
|
|
||||||
req['args']['n_gpu_layers'] = 100
|
|
||||||
elif '13b' in model:
|
|
||||||
req['args']['n_gpu_layers'] = 100
|
|
||||||
elif '30b' in model or '33b' in model:
|
|
||||||
req['args']['n_gpu_layers'] = 59 # 24GB
|
|
||||||
elif '65b' in model:
|
|
||||||
req['args']['n_gpu_layers'] = 42 # 24GB
|
|
||||||
elif 'rwkv' in model:
|
|
||||||
req['args']['rwkv_cuda_on'] = True
|
|
||||||
if '14b' in model:
|
|
||||||
req['args']['rwkv_strategy'] = 'cuda f16i8' # 24GB
|
|
||||||
else:
|
|
||||||
req['args']['rwkv_strategy'] = 'cuda f16' # 24GB
|
|
||||||
|
|
||||||
return model_api(req)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
for model in model_api({'action': 'list'})['result']:
|
|
||||||
try:
|
|
||||||
resp = complex_model_load(model)
|
|
||||||
|
|
||||||
if 'error' in resp:
|
|
||||||
print(f"❌ {model} FAIL Error: {resp['error']['message']}")
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
print_basic_model_info(resp)
|
|
||||||
|
|
||||||
ans = generate("0,1,1,2,3,5,8,13,", tokens=2)
|
|
||||||
|
|
||||||
if '21' in ans:
|
|
||||||
print(f"✅ {model} PASS ({ans})")
|
|
||||||
else:
|
|
||||||
print(f"❌ {model} FAIL ({ans})")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"❌ {model} FAIL Exception: {repr(e)}")
|
|
||||||
|
|
||||||
|
|
||||||
# 0,1,1,2,3,5,8,13, is the fibonacci sequence, the next number is 21.
|
|
||||||
# Some results below.
|
|
||||||
""" $ ./model-api-example.py
|
|
||||||
Model: 4bit_gpt4-x-alpaca-13b-native-4bit-128g-cuda
|
|
||||||
Lora(s): []
|
|
||||||
truncation_length = 2048
|
|
||||||
instruction_template = Alpaca
|
|
||||||
✅ 4bit_gpt4-x-alpaca-13b-native-4bit-128g-cuda PASS (21)
|
|
||||||
Model: 4bit_WizardLM-13B-Uncensored-4bit-128g
|
|
||||||
Lora(s): []
|
|
||||||
truncation_length = 2048
|
|
||||||
instruction_template = WizardLM
|
|
||||||
✅ 4bit_WizardLM-13B-Uncensored-4bit-128g PASS (21)
|
|
||||||
Model: Aeala_VicUnlocked-alpaca-30b-4bit
|
|
||||||
Lora(s): []
|
|
||||||
truncation_length = 2048
|
|
||||||
instruction_template = Alpaca
|
|
||||||
✅ Aeala_VicUnlocked-alpaca-30b-4bit PASS (21)
|
|
||||||
Model: alpaca-30b-4bit
|
|
||||||
Lora(s): []
|
|
||||||
truncation_length = 2048
|
|
||||||
instruction_template = Alpaca
|
|
||||||
✅ alpaca-30b-4bit PASS (21)
|
|
||||||
"""
|
|
||||||
|
|
@ -1,86 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import websockets
|
|
||||||
except ImportError:
|
|
||||||
print("Websockets package not found. Make sure it's installed.")
|
|
||||||
|
|
||||||
# For local streaming, the websockets are hosted without ssl - ws://
|
|
||||||
HOST = 'localhost:5005'
|
|
||||||
URI = f'ws://{HOST}/api/v1/stream'
|
|
||||||
|
|
||||||
# For reverse-proxied streaming, the remote will likely host with ssl - wss://
|
|
||||||
# URI = 'wss://your-uri-here.trycloudflare.com/api/v1/stream'
|
|
||||||
|
|
||||||
|
|
||||||
async def run(context):
|
|
||||||
# Note: the selected defaults change from time to time.
|
|
||||||
request = {
|
|
||||||
'prompt': context,
|
|
||||||
'max_new_tokens': 250,
|
|
||||||
'auto_max_new_tokens': False,
|
|
||||||
'max_tokens_second': 0,
|
|
||||||
|
|
||||||
# Generation params. If 'preset' is set to different than 'None', the values
|
|
||||||
# in presets/preset-name.yaml are used instead of the individual numbers.
|
|
||||||
'preset': 'None',
|
|
||||||
'do_sample': True,
|
|
||||||
'temperature': 0.7,
|
|
||||||
'top_p': 0.1,
|
|
||||||
'typical_p': 1,
|
|
||||||
'epsilon_cutoff': 0, # In units of 1e-4
|
|
||||||
'eta_cutoff': 0, # In units of 1e-4
|
|
||||||
'tfs': 1,
|
|
||||||
'top_a': 0,
|
|
||||||
'repetition_penalty': 1.18,
|
|
||||||
'repetition_penalty_range': 0,
|
|
||||||
'top_k': 40,
|
|
||||||
'min_length': 0,
|
|
||||||
'no_repeat_ngram_size': 0,
|
|
||||||
'num_beams': 1,
|
|
||||||
'penalty_alpha': 0,
|
|
||||||
'length_penalty': 1,
|
|
||||||
'early_stopping': False,
|
|
||||||
'mirostat_mode': 0,
|
|
||||||
'mirostat_tau': 5,
|
|
||||||
'mirostat_eta': 0.1,
|
|
||||||
'grammar_string': '',
|
|
||||||
'guidance_scale': 1,
|
|
||||||
'negative_prompt': '',
|
|
||||||
|
|
||||||
'seed': -1,
|
|
||||||
'add_bos_token': True,
|
|
||||||
'truncation_length': 2048,
|
|
||||||
'ban_eos_token': False,
|
|
||||||
'custom_token_bans': '',
|
|
||||||
'skip_special_tokens': True,
|
|
||||||
'stopping_strings': []
|
|
||||||
}
|
|
||||||
|
|
||||||
async with websockets.connect(URI, ping_interval=None) as websocket:
|
|
||||||
await websocket.send(json.dumps(request))
|
|
||||||
|
|
||||||
yield context # Remove this if you just want to see the reply
|
|
||||||
|
|
||||||
while True:
|
|
||||||
incoming_data = await websocket.recv()
|
|
||||||
incoming_data = json.loads(incoming_data)
|
|
||||||
|
|
||||||
match incoming_data['event']:
|
|
||||||
case 'text_stream':
|
|
||||||
yield incoming_data['text']
|
|
||||||
case 'stream_end':
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
async def print_response_stream(prompt):
|
|
||||||
async for response in run(prompt):
|
|
||||||
print(response, end='')
|
|
||||||
sys.stdout.flush() # If we don't flush, we won't see tokens in realtime.
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
prompt = "In order to make homemade bread, follow these steps:\n1)"
|
|
||||||
asyncio.run(print_response_stream(prompt))
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
import requests
|
|
||||||
|
|
||||||
# For local streaming, the websockets are hosted without ssl - http://
|
|
||||||
HOST = 'localhost:5000'
|
|
||||||
URI = f'http://{HOST}/api/v1/generate'
|
|
||||||
|
|
||||||
# For reverse-proxied streaming, the remote will likely host with ssl - https://
|
|
||||||
# URI = 'https://your-uri-here.trycloudflare.com/api/v1/generate'
|
|
||||||
|
|
||||||
|
|
||||||
def run(prompt):
|
|
||||||
request = {
|
|
||||||
'prompt': prompt,
|
|
||||||
'max_new_tokens': 250,
|
|
||||||
'auto_max_new_tokens': False,
|
|
||||||
'max_tokens_second': 0,
|
|
||||||
|
|
||||||
# Generation params. If 'preset' is set to different than 'None', the values
|
|
||||||
# in presets/preset-name.yaml are used instead of the individual numbers.
|
|
||||||
'preset': 'None',
|
|
||||||
'do_sample': True,
|
|
||||||
'temperature': 0.7,
|
|
||||||
'top_p': 0.1,
|
|
||||||
'typical_p': 1,
|
|
||||||
'epsilon_cutoff': 0, # In units of 1e-4
|
|
||||||
'eta_cutoff': 0, # In units of 1e-4
|
|
||||||
'tfs': 1,
|
|
||||||
'top_a': 0,
|
|
||||||
'repetition_penalty': 1.18,
|
|
||||||
'repetition_penalty_range': 0,
|
|
||||||
'top_k': 40,
|
|
||||||
'min_length': 0,
|
|
||||||
'no_repeat_ngram_size': 0,
|
|
||||||
'num_beams': 1,
|
|
||||||
'penalty_alpha': 0,
|
|
||||||
'length_penalty': 1,
|
|
||||||
'early_stopping': False,
|
|
||||||
'mirostat_mode': 0,
|
|
||||||
'mirostat_tau': 5,
|
|
||||||
'mirostat_eta': 0.1,
|
|
||||||
'grammar_string': '',
|
|
||||||
'guidance_scale': 1,
|
|
||||||
'negative_prompt': '',
|
|
||||||
|
|
||||||
'seed': -1,
|
|
||||||
'add_bos_token': True,
|
|
||||||
'truncation_length': 2048,
|
|
||||||
'ban_eos_token': False,
|
|
||||||
'custom_token_bans': '',
|
|
||||||
'skip_special_tokens': True,
|
|
||||||
'stopping_strings': []
|
|
||||||
}
|
|
||||||
|
|
||||||
response = requests.post(URI, json=request)
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
result = response.json()['results'][0]['text']
|
|
||||||
print(prompt + result)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
prompt = "In order to make homemade bread, follow these steps:\n1)"
|
|
||||||
run(prompt)
|
|
||||||
|
|
@ -1,8 +1,8 @@
|
||||||
#!/bin/bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
if [[ "$(pwd)" =~ " " ]]; then echo This script relies on Miniconda which can not be silently installed under a path with spaces. && exit; fi
|
if [[ "$(pwd)" =~ " " ]]; then echo This script relies on Miniforge which can not be silently installed under a path with spaces. && exit; fi
|
||||||
|
|
||||||
# deactivate existing conda envs as needed to avoid conflicts
|
# deactivate existing conda envs as needed to avoid conflicts
|
||||||
{ conda deactivate && conda deactivate && conda deactivate; } 2> /dev/null
|
{ conda deactivate && conda deactivate && conda deactivate; } 2> /dev/null
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
if [[ "$(pwd)" =~ " " ]]; then echo This script relies on Miniconda which can not be silently installed under a path with spaces. && exit; fi
|
if [[ "$(pwd)" =~ " " ]]; then echo This script relies on Miniforge which can not be silently installed under a path with spaces. && exit; fi
|
||||||
|
|
||||||
# deactivate existing conda envs as needed to avoid conflicts
|
# deactivate existing conda envs as needed to avoid conflicts
|
||||||
{ conda deactivate && conda deactivate && conda deactivate; } 2> /dev/null
|
{ conda deactivate && conda deactivate && conda deactivate; } 2> /dev/null
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,7 @@ cd /D "%~dp0"
|
||||||
|
|
||||||
set PATH=%PATH%;%SystemRoot%\system32
|
set PATH=%PATH%;%SystemRoot%\system32
|
||||||
|
|
||||||
echo "%CD%"| findstr /C:" " >nul && echo This script relies on Miniconda which can not be silently installed under a path with spaces. && goto end
|
echo "%CD%"| findstr /C:" " >nul && echo This script relies on Miniforge which can not be silently installed under a path with spaces. && goto end
|
||||||
|
|
||||||
@rem fix failed install when installing to a separate drive
|
@rem fix failed install when installing to a separate drive
|
||||||
set TMP=%cd%\installer_files
|
set TMP=%cd%\installer_files
|
||||||
|
|
@ -21,11 +21,12 @@ set INSTALL_ENV_DIR=%cd%\installer_files\env
|
||||||
set PYTHONNOUSERSITE=1
|
set PYTHONNOUSERSITE=1
|
||||||
set PYTHONPATH=
|
set PYTHONPATH=
|
||||||
set PYTHONHOME=
|
set PYTHONHOME=
|
||||||
|
set PYTHONUTF8=1
|
||||||
set "CUDA_PATH=%INSTALL_ENV_DIR%"
|
set "CUDA_PATH=%INSTALL_ENV_DIR%"
|
||||||
set "CUDA_HOME=%CUDA_PATH%"
|
set "CUDA_HOME=%CUDA_PATH%"
|
||||||
|
|
||||||
@rem activate installer env
|
@rem activate installer env
|
||||||
call "%CONDA_ROOT_PREFIX%\condabin\conda.bat" activate "%INSTALL_ENV_DIR%" || ( echo. && echo Miniconda hook not found. && goto end )
|
call "%CONDA_ROOT_PREFIX%\condabin\conda.bat" activate "%INSTALL_ENV_DIR%" || ( echo. && echo Miniforge hook not found. && goto end )
|
||||||
|
|
||||||
@rem enter commands
|
@rem enter commands
|
||||||
cmd /k "%*"
|
cmd /k "%*"
|
||||||
|
|
|
||||||
11
cmd_wsl.bat
11
cmd_wsl.bat
|
|
@ -1,11 +0,0 @@
|
||||||
@echo off
|
|
||||||
|
|
||||||
cd /D "%~dp0"
|
|
||||||
|
|
||||||
set PATH=%PATH%;%SystemRoot%\system32
|
|
||||||
|
|
||||||
@rem sed -i 's/\x0D$//' ./wsl.sh converts newlines to unix format in the wsl script
|
|
||||||
call wsl -e bash -lic "sed -i 's/\x0D$//' ./wsl.sh; source ./wsl.sh cmd"
|
|
||||||
|
|
||||||
:end
|
|
||||||
pause
|
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
'''
|
|
||||||
|
|
||||||
Converts a transformers model to safetensors format and shards it.
|
|
||||||
|
|
||||||
This makes it faster to load (because of safetensors) and lowers its RAM usage
|
|
||||||
while loading (because of sharding).
|
|
||||||
|
|
||||||
Based on the original script by 81300:
|
|
||||||
|
|
||||||
https://gist.github.com/81300/fe5b08bff1cba45296a829b9d6b0f303
|
|
||||||
|
|
||||||
'''
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import torch
|
|
||||||
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=54))
|
|
||||||
parser.add_argument('MODEL', type=str, default=None, nargs='?', help="Path to the input model.")
|
|
||||||
parser.add_argument('--output', type=str, default=None, help='Path to the output folder (default: models/{model_name}_safetensors).')
|
|
||||||
parser.add_argument("--max-shard-size", type=str, default="2GB", help="Maximum size of a shard in GB or MB (default: %(default)s).")
|
|
||||||
parser.add_argument('--bf16', action='store_true', help='Load the model with bfloat16 precision. Requires NVIDIA Ampere GPU.')
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
path = Path(args.MODEL)
|
|
||||||
model_name = path.name
|
|
||||||
|
|
||||||
print(f"Loading {model_name}...")
|
|
||||||
model = AutoModelForCausalLM.from_pretrained(path, low_cpu_mem_usage=True, torch_dtype=torch.bfloat16 if args.bf16 else torch.float16)
|
|
||||||
tokenizer = AutoTokenizer.from_pretrained(path)
|
|
||||||
|
|
||||||
out_folder = args.output or Path(f"models/{model_name}_safetensors")
|
|
||||||
print(f"Saving the converted model to {out_folder} with a maximum shard size of {args.max_shard_size}...")
|
|
||||||
model.save_pretrained(out_folder, max_shard_size=args.max_shard_size, safe_serialization=True)
|
|
||||||
tokenizer.save_pretrained(out_folder)
|
|
||||||
BIN
css/Inter/Inter-Italic-VariableFont_opsz,wght.ttf
Normal file
BIN
css/Inter/Inter-Italic-VariableFont_opsz,wght.ttf
Normal file
Binary file not shown.
BIN
css/Inter/Inter-VariableFont_opsz,wght.ttf
Normal file
BIN
css/Inter/Inter-VariableFont_opsz,wght.ttf
Normal file
Binary file not shown.
129
css/chat_style-Dark.css
Normal file
129
css/chat_style-Dark.css
Normal file
|
|
@ -0,0 +1,129 @@
|
||||||
|
.message {
|
||||||
|
display: grid;
|
||||||
|
align-items: start;
|
||||||
|
grid-template-columns: 60px minmax(0, 1fr);
|
||||||
|
width: min(100%, calc(724px + 60px));
|
||||||
|
padding-bottom: 22px;
|
||||||
|
padding-top: 6px;
|
||||||
|
font-size: 18px;
|
||||||
|
font-family: Roboto, Arial, sans-serif; /* Modern font */
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.circle-you,
|
||||||
|
.circle-bot {
|
||||||
|
background-color: #2b2b2b; /* Darker background for circles */
|
||||||
|
border-radius: 50%; /* Perfect circle */
|
||||||
|
border: 1px solid #4a90e2; /* Soft blue border */
|
||||||
|
box-shadow: 0 4px 8px rgb(0 0 0 / 50%); /* Soft shadow for depth */
|
||||||
|
}
|
||||||
|
|
||||||
|
.circle-bot img,
|
||||||
|
.circle-you img {
|
||||||
|
border-radius: 50%; /* Make images circular */
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
object-fit: cover;
|
||||||
|
}
|
||||||
|
|
||||||
|
.circle-you, .circle-bot {
|
||||||
|
width: 64px; /* Smaller size for modern look */
|
||||||
|
height: 64px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.text {
|
||||||
|
padding-left: 12px; /* Reduced padding for a cleaner layout */
|
||||||
|
color: #f0f0f0; /* Light text color for readability */
|
||||||
|
}
|
||||||
|
|
||||||
|
.text p {
|
||||||
|
margin-top: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.username {
|
||||||
|
padding-left: 10px;
|
||||||
|
font-size: 20px;
|
||||||
|
font-weight: bold;
|
||||||
|
color: #e0e0e0; /* Light gray text */
|
||||||
|
transition: color 0.3s ease; /* Smooth color transition */
|
||||||
|
}
|
||||||
|
|
||||||
|
.username:hover {
|
||||||
|
color: #4a90e2; /* Blue color on hover */
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body {
|
||||||
|
position: relative;
|
||||||
|
border: 1px solid rgb(255 255 255 / 10%); /* Soft white border */
|
||||||
|
border-radius: 8px; /* Slightly rounded corners */
|
||||||
|
padding: 15px;
|
||||||
|
background: #1e1e1e; /* Dark background */
|
||||||
|
box-shadow: 0 4px 10px rgb(0 0 0 / 30%); /* Subtle shadow for depth */
|
||||||
|
transition: background 0.3s ease; /* Smooth transition for background */
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body:hover {
|
||||||
|
background: #252525; /* Slightly lighter on hover */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Adds 2 extra lines at the top and bottom of the message */
|
||||||
|
.message-body::before,
|
||||||
|
.message-body::after {
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
left: 10px;
|
||||||
|
right: 10px;
|
||||||
|
height: 1px;
|
||||||
|
background-color: rgb(255 255 255 / 5%); /* Faded lines for subtle separation */
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body::before {
|
||||||
|
top: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body::after {
|
||||||
|
bottom: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body img {
|
||||||
|
max-width: 300px;
|
||||||
|
max-height: 300px;
|
||||||
|
border-radius: 10px; /* Rounded corners for images */
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body p {
|
||||||
|
color: #e0e0e0 !important; /* Light color for text */
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body p em {
|
||||||
|
color: #a6a6a6 !important; /* Softer gray for emphasized text */
|
||||||
|
}
|
||||||
|
|
||||||
|
@media screen and (width <= 688px) {
|
||||||
|
.message {
|
||||||
|
display: grid;
|
||||||
|
align-items: start;
|
||||||
|
grid-template-columns: 60px minmax(0, 1fr);
|
||||||
|
padding-bottom: 25px;
|
||||||
|
font-size: 15px;
|
||||||
|
font-family: Roboto, Arial, sans-serif; /* Modern font */
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.circle-you, .circle-bot {
|
||||||
|
width: 40px; /* Smaller size for mobile */
|
||||||
|
height: 40px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.text {
|
||||||
|
padding-left: 10px; /* Reduced padding for mobile */
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body p {
|
||||||
|
font-size: 14px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.username {
|
||||||
|
font-size: 18px; /* Smaller username for mobile */
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -2,8 +2,11 @@
|
||||||
|
|
||||||
.message {
|
.message {
|
||||||
display: grid;
|
display: grid;
|
||||||
|
align-items: start;
|
||||||
grid-template-columns: 60px minmax(0, 1fr);
|
grid-template-columns: 60px minmax(0, 1fr);
|
||||||
padding-bottom: 28px;
|
width: min(100%, calc(724px + 60px + 90px));
|
||||||
|
padding-bottom: 21px;
|
||||||
|
padding-top: 7px;
|
||||||
font-size: 18px;
|
font-size: 18px;
|
||||||
font-family: 'Noto Sans', Arial, sans-serif;
|
font-family: 'Noto Sans', Arial, sans-serif;
|
||||||
line-height: 1.428571429;
|
line-height: 1.428571429;
|
||||||
|
|
@ -33,7 +36,7 @@
|
||||||
.text {
|
.text {
|
||||||
/* Change this to move the message box further left or right depending on the size of your profile pic */
|
/* Change this to move the message box further left or right depending on the size of your profile pic */
|
||||||
padding-left: 90px;
|
padding-left: 90px;
|
||||||
text-shadow: 2px 2px 2px rgb(0, 0, 0, 0.4);
|
text-shadow: 2px 2px 2px rgb(0 0 0 / 40%);
|
||||||
}
|
}
|
||||||
|
|
||||||
.text p {
|
.text p {
|
||||||
|
|
@ -44,37 +47,37 @@
|
||||||
padding-left: 10px;
|
padding-left: 10px;
|
||||||
font-size: 22px;
|
font-size: 22px;
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
border-top: 1px solid rgb(51, 64, 90);
|
border-top: 1px solid rgb(51 64 90);
|
||||||
padding: 3px;
|
padding: 3px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body {
|
.message-body {
|
||||||
position: relative;
|
position: relative;
|
||||||
border-radius: 1rem;
|
border: 1px solid rgb(255 255 255 / 45.9%);
|
||||||
border: 1px solid rgba(255, 255, 255, 0.459);
|
|
||||||
border-radius: 10px;
|
border-radius: 10px;
|
||||||
padding: 10px;
|
padding: 10px;
|
||||||
padding-top: 5px;
|
padding-top: 5px;
|
||||||
|
|
||||||
/* Message gradient background color - remove the line bellow if you don't want a background color or gradient */
|
/* Message gradient background color - remove the line bellow if you don't want a background color or gradient */
|
||||||
background: linear-gradient(to bottom, #171730, #1b263f);
|
background: linear-gradient(to bottom, #171730, #1b263f);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Adds 2 extra lines at the top and bottom of the message */
|
/* Adds 2 extra lines at the top and bottom of the message */
|
||||||
.message-body:before,
|
.message-body::before,
|
||||||
.message-body:after {
|
.message-body::after {
|
||||||
content: "";
|
content: "";
|
||||||
position: absolute;
|
position: absolute;
|
||||||
left: 10px;
|
left: 10px;
|
||||||
right: 10px;
|
right: 10px;
|
||||||
height: 1px;
|
height: 1px;
|
||||||
background-color: rgba(255, 255, 255, 0.13);
|
background-color: rgb(255 255 255 / 13%);
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body:before {
|
.message-body::before {
|
||||||
top: 6px;
|
top: 6px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body:after {
|
.message-body::after {
|
||||||
bottom: 6px;
|
bottom: 6px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -84,21 +87,21 @@
|
||||||
border-radius: 20px;
|
border-radius: 20px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p {
|
.message-body p, .message-body li {
|
||||||
margin-bottom: 0 !important;
|
|
||||||
font-size: 18px !important;
|
font-size: 18px !important;
|
||||||
line-height: 1.428571429 !important;
|
color: rgb(243 244 246) !important;
|
||||||
color: rgb(243, 244, 246) !important;
|
text-shadow: 2px 2px 2px rgb(0 0 0);
|
||||||
text-shadow: 2px 2px 2px rgb(0, 0, 0);
|
font-weight: 500;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p em {
|
.message-body p em {
|
||||||
color: rgb(138, 138, 138) !important;
|
color: rgb(138 138 138) !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
@media screen and (max-width: 688px) {
|
@media screen and (width <= 688px) {
|
||||||
.message {
|
.message {
|
||||||
display: grid;
|
display: grid;
|
||||||
|
align-items: start;
|
||||||
grid-template-columns: 60px minmax(0, 1fr);
|
grid-template-columns: 60px minmax(0, 1fr);
|
||||||
padding-bottom: 25px;
|
padding-bottom: 25px;
|
||||||
font-size: 15px;
|
font-size: 15px;
|
||||||
|
|
@ -120,10 +123,10 @@
|
||||||
}
|
}
|
||||||
|
|
||||||
.text {
|
.text {
|
||||||
padding-left: 0px;
|
padding-left: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p {
|
.message-body p, .message-body li {
|
||||||
font-size: 16px !important;
|
font-size: 16px !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -16,6 +16,8 @@
|
||||||
}
|
}
|
||||||
|
|
||||||
.message {
|
.message {
|
||||||
padding-bottom: 30px;
|
padding-bottom: 1.5em;
|
||||||
|
padding-top: 0.5em;
|
||||||
grid-template-columns: 70px minmax(0, 1fr);
|
grid-template-columns: 70px minmax(0, 1fr);
|
||||||
|
width: min(100%, calc(724px + 70px));
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,23 +1,31 @@
|
||||||
.message {
|
.message {
|
||||||
display: grid;
|
display: grid;
|
||||||
|
align-items: start;
|
||||||
grid-template-columns: 60px minmax(0, 1fr);
|
grid-template-columns: 60px minmax(0, 1fr);
|
||||||
padding-bottom: 25px;
|
width: min(100%, calc(724px + 60px));
|
||||||
|
padding-bottom: 1.5em;
|
||||||
|
padding-top: 0.5em;
|
||||||
font-size: 15px;
|
font-size: 15px;
|
||||||
font-family: 'Noto Sans', Helvetica, Arial, sans-serif;
|
font-family: 'Noto Sans', Helvetica, Arial, sans-serif;
|
||||||
line-height: 23px !important;
|
line-height: 22.5px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-body {
|
||||||
|
margin-top: 3px;
|
||||||
|
font-size: 15px !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.circle-you {
|
.circle-you {
|
||||||
width: 50px;
|
width: 50px;
|
||||||
height: 50px;
|
height: 50px;
|
||||||
background-color: rgb(238, 78, 59);
|
background-color: rgb(238 78 59);
|
||||||
border-radius: 50%;
|
border-radius: 50%;
|
||||||
}
|
}
|
||||||
|
|
||||||
.circle-bot {
|
.circle-bot {
|
||||||
width: 50px;
|
width: 50px;
|
||||||
height: 50px;
|
height: 50px;
|
||||||
background-color: rgb(59, 78, 244);
|
background-color: rgb(59 78 244);
|
||||||
border-radius: 50%;
|
border-radius: 50%;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -29,10 +37,6 @@
|
||||||
object-fit: cover;
|
object-fit: cover;
|
||||||
}
|
}
|
||||||
|
|
||||||
.text p {
|
|
||||||
margin-top: 5px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.username {
|
.username {
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
|
|
@ -43,17 +47,15 @@
|
||||||
border-radius: 20px;
|
border-radius: 20px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p {
|
.message-body p, .message-body li {
|
||||||
margin-bottom: 0 !important;
|
font-weight: 500;
|
||||||
font-size: 15px !important;
|
|
||||||
line-height: 23px !important;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark .message-body p em {
|
.dark .message-body p em {
|
||||||
color: rgb(138, 138, 138) !important;
|
color: rgb(138 138 138) !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p em {
|
.message-body p em {
|
||||||
color: rgb(110, 110, 110) !important;
|
color: rgb(110 110 110) !important;
|
||||||
font-weight: 500;
|
font-weight: 500;
|
||||||
}
|
}
|
||||||
|
|
@ -1,5 +1,7 @@
|
||||||
.message {
|
.message {
|
||||||
padding-bottom: 25px;
|
width: min(100%, calc(724px + 60px));
|
||||||
|
padding-bottom: 22px;
|
||||||
|
padding-top: 3px;
|
||||||
font-size: 15px;
|
font-size: 15px;
|
||||||
font-family: 'Noto Sans', Helvetica, Arial, sans-serif;
|
font-family: 'Noto Sans', Helvetica, Arial, sans-serif;
|
||||||
line-height: 1.428571429;
|
line-height: 1.428571429;
|
||||||
|
|
@ -8,14 +10,14 @@
|
||||||
.circle-you {
|
.circle-you {
|
||||||
width: 50px;
|
width: 50px;
|
||||||
height: 50px;
|
height: 50px;
|
||||||
background-color: rgb(238, 78, 59);
|
background-color: rgb(238 78 59);
|
||||||
border-radius: 50%;
|
border-radius: 50%;
|
||||||
}
|
}
|
||||||
|
|
||||||
.circle-bot {
|
.circle-bot {
|
||||||
width: 50px;
|
width: 50px;
|
||||||
height: 50px;
|
height: 50px;
|
||||||
background-color: rgb(59, 78, 244);
|
background-color: rgb(59 78 244);
|
||||||
border-radius: 50%;
|
border-radius: 50%;
|
||||||
float: left;
|
float: left;
|
||||||
margin-right: 10px;
|
margin-right: 10px;
|
||||||
|
|
@ -47,7 +49,7 @@
|
||||||
|
|
||||||
.circle-you + .text {
|
.circle-you + .text {
|
||||||
float: right;
|
float: right;
|
||||||
background-color: rgb(0, 132, 255);
|
background-color: rgb(0 132 255);
|
||||||
margin-right: 10px;
|
margin-right: 10px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -59,8 +61,10 @@
|
||||||
text-align: right;
|
text-align: right;
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark .circle-bot + .text div, .dark .circle-bot + .text * {
|
.dark .circle-bot + .text div, .dark .circle-bot + .text *,
|
||||||
color: #000;
|
.dark .chat .message .circle-bot + .text .message-body :is(h1, h2, h3, h4, h5, h6),
|
||||||
|
.dark .chat .message .circle-bot + .text .message-body a {
|
||||||
|
color: #000 !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.text {
|
.text {
|
||||||
|
|
@ -75,25 +79,29 @@
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body {
|
|
||||||
}
|
|
||||||
|
|
||||||
.message-body img {
|
.message-body img {
|
||||||
max-width: 300px;
|
max-width: 300px;
|
||||||
max-height: 300px;
|
max-height: 300px;
|
||||||
border-radius: 20px;
|
border-radius: 20px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p {
|
.message-body p, .message-body li {
|
||||||
margin-bottom: 0 !important;
|
|
||||||
font-size: 15px !important;
|
font-size: 15px !important;
|
||||||
line-height: 1.428571429 !important;
|
font-weight: 500;
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark .message-body p em {
|
.dark .message-body p em {
|
||||||
color: rgb(138, 138, 138) !important;
|
color: rgb(138 138 138) !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p em {
|
.message-body p em {
|
||||||
color: rgb(110, 110, 110) !important;
|
color: rgb(110 110 110) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.editing-textarea {
|
||||||
|
width: max(30rem) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.circle-you + .text .edit-control-button, .circle-you + .text .editing-textarea {
|
||||||
|
color: #000 !important;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,55 +1,97 @@
|
||||||
.message {
|
.message {
|
||||||
padding-bottom: 25px;
|
display: block;
|
||||||
|
width: min(100%, 724px);
|
||||||
|
padding-top: 0;
|
||||||
|
padding-bottom: 21px;
|
||||||
font-size: 15px;
|
font-size: 15px;
|
||||||
font-family: 'Noto Sans', Helvetica, Arial, sans-serif;
|
font-family: 'Noto Sans', Helvetica, Arial, sans-serif;
|
||||||
line-height: 1.428571429;
|
line-height: 1.428571429;
|
||||||
|
grid-template-columns: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
.text-you {
|
.circle-you, .circle-bot {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.text {
|
||||||
|
max-width: 65%;
|
||||||
|
border-radius: 18px;
|
||||||
|
padding: 12px 16px;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
clear: both;
|
||||||
|
box-shadow: 0 1px 2px rgb(0 0 0 / 10%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.username {
|
||||||
|
font-weight: 600;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
opacity: 0.65;
|
||||||
|
padding-left: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* User messages - right aligned, WhatsApp green */
|
||||||
|
.circle-you + .text {
|
||||||
background-color: #d9fdd3;
|
background-color: #d9fdd3;
|
||||||
border-radius: 15px;
|
|
||||||
padding: 10px;
|
|
||||||
padding-top: 5px;
|
|
||||||
float: right;
|
float: right;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: 8px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.text-bot {
|
.circle-you + .text .username {
|
||||||
background-color: #f2f2f2;
|
display: none;
|
||||||
border-radius: 15px;
|
|
||||||
padding: 10px;
|
|
||||||
padding-top: 5px;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark .text-you {
|
/* Bot messages - left aligned, white */
|
||||||
background-color: #005c4b;
|
.circle-bot + .text {
|
||||||
color: #111b21;
|
background-color: #fff;
|
||||||
|
float: left;
|
||||||
|
margin-right: auto;
|
||||||
|
margin-left: 8px;
|
||||||
|
border: 1px solid #e5e5e5;
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark .text-bot {
|
.circle-bot + .text .message-actions {
|
||||||
background-color: #1f2937;
|
bottom: -25px !important;
|
||||||
color: #111b21;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.text-bot p, .text-you p {
|
/* Dark theme colors */
|
||||||
margin-top: 5px;
|
.dark .circle-you + .text {
|
||||||
|
background-color: #144d37;
|
||||||
|
color: #e4e6ea;
|
||||||
|
box-shadow: 0 1px 2px rgb(0 0 0 / 30%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .circle-bot + .text {
|
||||||
|
background-color: #202c33;
|
||||||
|
color: #e4e6ea;
|
||||||
|
border: 1px solid #3c4043;
|
||||||
|
box-shadow: 0 1px 2px rgb(0 0 0 / 30%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .username {
|
||||||
|
opacity: 0.7;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body img {
|
.message-body img {
|
||||||
max-width: 300px;
|
max-width: 300px;
|
||||||
max-height: 300px;
|
max-height: 300px;
|
||||||
border-radius: 20px;
|
border-radius: 12px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p {
|
.message-body p, .message-body li {
|
||||||
margin-bottom: 0 !important;
|
|
||||||
font-size: 15px !important;
|
font-size: 15px !important;
|
||||||
line-height: 1.428571429 !important;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark .message-body p em {
|
.dark .message-body p em {
|
||||||
color: rgb(138, 138, 138) !important;
|
color: rgb(170 170 170) !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p em {
|
.message-body p em {
|
||||||
color: rgb(110, 110, 110) !important;
|
color: rgb(100 100 100) !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* Message actions positioning */
|
||||||
|
.message-actions {
|
||||||
|
margin-top: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
|
||||||
111
css/highlightjs/github-dark.min.css
vendored
Normal file
111
css/highlightjs/github-dark.min.css
vendored
Normal file
|
|
@ -0,0 +1,111 @@
|
||||||
|
html body gradio-app .gradio-container pre code.hljs {
|
||||||
|
display: block;
|
||||||
|
overflow-x: auto;
|
||||||
|
padding: 1em
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container code.hljs {
|
||||||
|
padding: 3px 5px
|
||||||
|
}
|
||||||
|
|
||||||
|
/*!
|
||||||
|
Theme: GitHub Dark
|
||||||
|
Description: Dark theme as seen on github.com
|
||||||
|
Author: github.com
|
||||||
|
Maintainer: @Hirse
|
||||||
|
Updated: 2021-05-15
|
||||||
|
|
||||||
|
Outdated base version: https://github.com/primer/github-syntax-dark
|
||||||
|
Current colors taken from GitHub's CSS
|
||||||
|
*/
|
||||||
|
html body gradio-app .gradio-container .hljs {
|
||||||
|
color: #c9d1d9;
|
||||||
|
background: #0d1117
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-doctag,
|
||||||
|
html body gradio-app .gradio-container .hljs-keyword,
|
||||||
|
html body gradio-app .gradio-container .hljs-meta .hljs-keyword,
|
||||||
|
html body gradio-app .gradio-container .hljs-template-tag,
|
||||||
|
html body gradio-app .gradio-container .hljs-template-variable,
|
||||||
|
html body gradio-app .gradio-container .hljs-type,
|
||||||
|
html body gradio-app .gradio-container .hljs-variable.language_ {
|
||||||
|
color: #ff7b72
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-title,
|
||||||
|
html body gradio-app .gradio-container .hljs-title.class_,
|
||||||
|
html body gradio-app .gradio-container .hljs-title.class_.inherited__,
|
||||||
|
html body gradio-app .gradio-container .hljs-title.function_ {
|
||||||
|
color: #d2a8ff
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-attr,
|
||||||
|
html body gradio-app .gradio-container .hljs-attribute,
|
||||||
|
html body gradio-app .gradio-container .hljs-literal,
|
||||||
|
html body gradio-app .gradio-container .hljs-meta,
|
||||||
|
html body gradio-app .gradio-container .hljs-number,
|
||||||
|
html body gradio-app .gradio-container .hljs-operator,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-attr,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-class,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-id,
|
||||||
|
html body gradio-app .gradio-container .hljs-variable {
|
||||||
|
color: #79c0ff
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-meta .hljs-string,
|
||||||
|
html body gradio-app .gradio-container .hljs-regexp,
|
||||||
|
html body gradio-app .gradio-container .hljs-string {
|
||||||
|
color: #a5d6ff
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-built_in,
|
||||||
|
html body gradio-app .gradio-container .hljs-symbol {
|
||||||
|
color: #ffa657
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-code,
|
||||||
|
html body gradio-app .gradio-container .hljs-comment,
|
||||||
|
html body gradio-app .gradio-container .hljs-formula {
|
||||||
|
color: #8b949e
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-name,
|
||||||
|
html body gradio-app .gradio-container .hljs-quote,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-pseudo,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-tag {
|
||||||
|
color: #7ee787
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-subst {
|
||||||
|
color: #c9d1d9
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-section {
|
||||||
|
color: #1f6feb;
|
||||||
|
font-weight: 700
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-bullet {
|
||||||
|
color: #f2cc60
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-emphasis {
|
||||||
|
color: #c9d1d9;
|
||||||
|
font-style: italic
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-strong {
|
||||||
|
color: #c9d1d9;
|
||||||
|
font-weight: 700
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-addition {
|
||||||
|
color: #aff5b4;
|
||||||
|
background-color: #033a16
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-deletion {
|
||||||
|
color: #ffdcd7;
|
||||||
|
background-color: #67060c
|
||||||
|
}
|
||||||
111
css/highlightjs/github.min.css
vendored
Normal file
111
css/highlightjs/github.min.css
vendored
Normal file
|
|
@ -0,0 +1,111 @@
|
||||||
|
html body gradio-app .gradio-container pre code.hljs {
|
||||||
|
display: block;
|
||||||
|
overflow-x: auto;
|
||||||
|
padding: 1em
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container code.hljs {
|
||||||
|
padding: 3px 5px
|
||||||
|
}
|
||||||
|
|
||||||
|
/*!
|
||||||
|
Theme: GitHub
|
||||||
|
Description: Light theme as seen on github.com
|
||||||
|
Author: github.com
|
||||||
|
Maintainer: @Hirse
|
||||||
|
Updated: 2021-05-15
|
||||||
|
|
||||||
|
Outdated base version: https://github.com/primer/github-syntax-light
|
||||||
|
Current colors taken from GitHub's CSS
|
||||||
|
*/
|
||||||
|
html body gradio-app .gradio-container .hljs {
|
||||||
|
color: #24292e;
|
||||||
|
background: #fff
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-doctag,
|
||||||
|
html body gradio-app .gradio-container .hljs-keyword,
|
||||||
|
html body gradio-app .gradio-container .hljs-meta .hljs-keyword,
|
||||||
|
html body gradio-app .gradio-container .hljs-template-tag,
|
||||||
|
html body gradio-app .gradio-container .hljs-template-variable,
|
||||||
|
html body gradio-app .gradio-container .hljs-type,
|
||||||
|
html body gradio-app .gradio-container .hljs-variable.language_ {
|
||||||
|
color: #d73a49
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-title,
|
||||||
|
html body gradio-app .gradio-container .hljs-title.class_,
|
||||||
|
html body gradio-app .gradio-container .hljs-title.class_.inherited__,
|
||||||
|
html body gradio-app .gradio-container .hljs-title.function_ {
|
||||||
|
color: #6f42c1
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-attr,
|
||||||
|
html body gradio-app .gradio-container .hljs-attribute,
|
||||||
|
html body gradio-app .gradio-container .hljs-literal,
|
||||||
|
html body gradio-app .gradio-container .hljs-meta,
|
||||||
|
html body gradio-app .gradio-container .hljs-number,
|
||||||
|
html body gradio-app .gradio-container .hljs-operator,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-attr,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-class,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-id,
|
||||||
|
html body gradio-app .gradio-container .hljs-variable {
|
||||||
|
color: #005cc5
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-meta .hljs-string,
|
||||||
|
html body gradio-app .gradio-container .hljs-regexp,
|
||||||
|
html body gradio-app .gradio-container .hljs-string {
|
||||||
|
color: #032f62
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-built_in,
|
||||||
|
html body gradio-app .gradio-container .hljs-symbol {
|
||||||
|
color: #e36209
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-code,
|
||||||
|
html body gradio-app .gradio-container .hljs-comment,
|
||||||
|
html body gradio-app .gradio-container .hljs-formula {
|
||||||
|
color: #6a737d
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-name,
|
||||||
|
html body gradio-app .gradio-container .hljs-quote,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-pseudo,
|
||||||
|
html body gradio-app .gradio-container .hljs-selector-tag {
|
||||||
|
color: #22863a
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-subst {
|
||||||
|
color: #24292e
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-section {
|
||||||
|
color: #005cc5;
|
||||||
|
font-weight: 700
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-bullet {
|
||||||
|
color: #735c0f
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-emphasis {
|
||||||
|
color: #24292e;
|
||||||
|
font-style: italic
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-strong {
|
||||||
|
color: #24292e;
|
||||||
|
font-weight: 700
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-addition {
|
||||||
|
color: #22863a;
|
||||||
|
background-color: #f0fff4
|
||||||
|
}
|
||||||
|
|
||||||
|
html body gradio-app .gradio-container .hljs-deletion {
|
||||||
|
color: #b31d28;
|
||||||
|
background-color: #ffeef0
|
||||||
|
}
|
||||||
1
css/highlightjs/highlightjs-copy.min.css
vendored
Normal file
1
css/highlightjs/highlightjs-copy.min.css
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
.hljs-copy-wrapper{position:relative;overflow:hidden}.hljs-copy-wrapper:hover .hljs-copy-button,.hljs-copy-button:focus{transform:translateX(0)}.hljs-copy-button{position:absolute;transform:translateX(calc(100% + 1.125em));top:1em;right:1em;width:2rem;height:2rem;text-indent:-9999px;color:#fff;border-radius:.25rem;border:1px solid #ffffff22;background-color:#2d2b57;background-color:var(--hljs-theme-background);background-image:url('data:image/svg+xml;utf-8,<svg width="16" height="16" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg"><path fill-rule="evenodd" clip-rule="evenodd" d="M6 5C5.73478 5 5.48043 5.10536 5.29289 5.29289C5.10536 5.48043 5 5.73478 5 6V20C5 20.2652 5.10536 20.5196 5.29289 20.7071C5.48043 20.8946 5.73478 21 6 21H18C18.2652 21 18.5196 20.8946 18.7071 20.7071C18.8946 20.5196 19 20.2652 19 20V6C19 5.73478 18.8946 5.48043 18.7071 5.29289C18.5196 5.10536 18.2652 5 18 5H16C15.4477 5 15 4.55228 15 4C15 3.44772 15.4477 3 16 3H18C18.7956 3 19.5587 3.31607 20.1213 3.87868C20.6839 4.44129 21 5.20435 21 6V20C21 20.7957 20.6839 21.5587 20.1213 22.1213C19.5587 22.6839 18.7957 23 18 23H6C5.20435 23 4.44129 22.6839 3.87868 22.1213C3.31607 21.5587 3 20.7957 3 20V6C3 5.20435 3.31607 4.44129 3.87868 3.87868C4.44129 3.31607 5.20435 3 6 3H8C8.55228 3 9 3.44772 9 4C9 4.55228 8.55228 5 8 5H6Z" fill="white"/><path fill-rule="evenodd" clip-rule="evenodd" d="M7 3C7 1.89543 7.89543 1 9 1H15C16.1046 1 17 1.89543 17 3V5C17 6.10457 16.1046 7 15 7H9C7.89543 7 7 6.10457 7 5V3ZM15 3H9V5H15V3Z" fill="white"/></svg>');background-repeat:no-repeat;background-position:center;transition:background-color 200ms ease,transform 200ms ease-out}.hljs-copy-button:hover{border-color:#ffffff44}.hljs-copy-button:active{border-color:#ffffff66}.hljs-copy-button[data-copied="true"]{text-indent:0;width:auto;background-image:none}@media(prefers-reduced-motion){.hljs-copy-button{transition:none}}.hljs-copy-alert{clip:rect(0 0 0 
0);clip-path:inset(50%);height:1px;overflow:hidden;position:absolute;white-space:nowrap;width:1px}
|
||||||
|
|
@ -1,104 +0,0 @@
|
||||||
#parent #container {
|
|
||||||
background-color: #eef2ff;
|
|
||||||
padding: 17px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .reply {
|
|
||||||
background-color: rgb(214, 218, 240);
|
|
||||||
border-bottom-color: rgb(183, 197, 217);
|
|
||||||
border-bottom-style: solid;
|
|
||||||
border-bottom-width: 1px;
|
|
||||||
border-image-outset: 0;
|
|
||||||
border-image-repeat: stretch;
|
|
||||||
border-image-slice: 100%;
|
|
||||||
border-image-source: none;
|
|
||||||
border-image-width: 1;
|
|
||||||
border-left-color: rgb(0, 0, 0);
|
|
||||||
border-left-style: none;
|
|
||||||
border-left-width: 0px;
|
|
||||||
border-right-color: rgb(183, 197, 217);
|
|
||||||
border-right-style: solid;
|
|
||||||
border-right-width: 1px;
|
|
||||||
border-top-color: rgb(0, 0, 0);
|
|
||||||
border-top-style: none;
|
|
||||||
border-top-width: 0px;
|
|
||||||
color: rgb(0, 0, 0);
|
|
||||||
display: table;
|
|
||||||
font-family: arial, helvetica, sans-serif;
|
|
||||||
font-size: 13.3333px;
|
|
||||||
margin-bottom: 4px;
|
|
||||||
margin-left: 0px;
|
|
||||||
margin-right: 0px;
|
|
||||||
margin-top: 4px;
|
|
||||||
overflow-x: hidden;
|
|
||||||
overflow-y: hidden;
|
|
||||||
padding-bottom: 4px;
|
|
||||||
padding-left: 2px;
|
|
||||||
padding-right: 2px;
|
|
||||||
padding-top: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .number {
|
|
||||||
color: rgb(0, 0, 0);
|
|
||||||
font-family: arial, helvetica, sans-serif;
|
|
||||||
font-size: 13.3333px;
|
|
||||||
width: 342.65px;
|
|
||||||
margin-right: 7px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .op {
|
|
||||||
color: rgb(0, 0, 0);
|
|
||||||
font-family: arial, helvetica, sans-serif;
|
|
||||||
font-size: 13.3333px;
|
|
||||||
margin-bottom: 8px;
|
|
||||||
margin-left: 0px;
|
|
||||||
margin-right: 0px;
|
|
||||||
margin-top: 4px;
|
|
||||||
overflow-x: hidden;
|
|
||||||
overflow-y: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .op blockquote {
|
|
||||||
margin-left: 0px !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .name {
|
|
||||||
color: rgb(17, 119, 67);
|
|
||||||
font-family: arial, helvetica, sans-serif;
|
|
||||||
font-size: 13.3333px;
|
|
||||||
font-weight: 700;
|
|
||||||
margin-left: 7px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .quote {
|
|
||||||
color: rgb(221, 0, 0);
|
|
||||||
font-family: arial, helvetica, sans-serif;
|
|
||||||
font-size: 13.3333px;
|
|
||||||
text-decoration-color: rgb(221, 0, 0);
|
|
||||||
text-decoration-line: underline;
|
|
||||||
text-decoration-style: solid;
|
|
||||||
text-decoration-thickness: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .greentext {
|
|
||||||
color: rgb(120, 153, 34);
|
|
||||||
font-family: arial, helvetica, sans-serif;
|
|
||||||
font-size: 13.3333px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container blockquote {
|
|
||||||
margin: 0px !important;
|
|
||||||
margin-block-start: 1em;
|
|
||||||
margin-block-end: 1em;
|
|
||||||
margin-inline-start: 40px;
|
|
||||||
margin-inline-end: 40px;
|
|
||||||
margin-top: 13.33px !important;
|
|
||||||
margin-bottom: 13.33px !important;
|
|
||||||
margin-left: 40px !important;
|
|
||||||
margin-right: 40px !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
#parent #container .message_4chan {
|
|
||||||
color: black;
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
@ -1,64 +1,97 @@
|
||||||
.message {
|
.chat {
|
||||||
display: grid;
|
background: transparent;
|
||||||
grid-template-columns: 60px 1fr;
|
padding: 0;
|
||||||
padding-bottom: 25px;
|
padding-top: 0;
|
||||||
font-size: 15px;
|
}
|
||||||
font-family: 'Noto Sans', Helvetica, Arial, sans-serif;
|
|
||||||
line-height: 22px;
|
.chat > .messages:first-child {
|
||||||
|
padding-top: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat .message-body p, .chat .message-body li {
|
||||||
|
font-size: 1rem !important;
|
||||||
|
line-height: 28px !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .chat .message-body :is(p,li,h1,h2,h3,h4,h5,h6),
|
||||||
|
.dark .chat .message-body em:not(:is(h1,h2,h3,h4,h5,h6,b,strong) em),
|
||||||
|
.dark .chat .message-body q:not(:is(h1,h2,h3,h4,h5,h6,b,strong) q) {
|
||||||
|
color: #d1d5db !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat .message-body :is(th, td),
|
||||||
|
.prose hr {
|
||||||
|
border-color: #40404096 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .chat .message-body :is(th, td),
|
||||||
|
.dark .prose hr {
|
||||||
|
border-color: rgb(255 255 255 / 30%) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat .message-body :is(p, ul, ol) {
|
||||||
|
margin: 1.25em 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat .message-body :is(p, ul, ol):first-child {
|
||||||
|
margin-top: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat .message-body :is(p, ul, ol):last-child {
|
||||||
|
margin-bottom: 0 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.user-message, .assistant-message {
|
||||||
|
font-family: Inter, Helvetica, Arial, sans-serif;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message:first-child {
|
||||||
|
padding-top: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
.username {
|
.username {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
.message-body p {
|
.chat .user-message {
|
||||||
font-size: 15px !important;
|
background: #f3f4f6;
|
||||||
line-height: 22px !important;
|
padding: 1.5rem 1rem;
|
||||||
margin-bottom: 1.25em !important;
|
padding-bottom: 2rem;
|
||||||
|
border-radius: 0;
|
||||||
|
border-bottom-right-radius: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
.chat .message-body ul, .chat .message-body ol {
|
.chat .assistant-message {
|
||||||
margin-bottom: 1.25em !important;
|
padding: 1.5rem 1rem;
|
||||||
}
|
padding-bottom: 2rem;
|
||||||
|
border-radius: 0;
|
||||||
.dark .message-body p em {
|
border: 0;
|
||||||
color: rgb(198, 202, 214) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.message-body p em {
|
|
||||||
color: rgb(110, 110, 110) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.gradio-container .chat .assistant-message {
|
|
||||||
padding: 15px;
|
|
||||||
border-radius: 20px;
|
|
||||||
background-color: #0000000f;
|
|
||||||
margin-top: 9px !important;
|
|
||||||
margin-bottom: 18px !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.gradio-container .chat .user-message {
|
|
||||||
padding: 15px;
|
|
||||||
border-radius: 20px;
|
|
||||||
margin-bottom: 9px !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.gradio-container .chat .assistant-message:last-child, .gradio-container .chat .user-message:last-child {
|
|
||||||
margin-bottom: 0px !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dark .chat .assistant-message {
|
|
||||||
background-color: #1f2937;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark .chat .user-message {
|
.dark .chat .user-message {
|
||||||
background-color: transparent;
|
background: var(--light-gray);
|
||||||
}
|
}
|
||||||
|
|
||||||
code {
|
.dark .chat .assistant-message {
|
||||||
background-color: white !important;
|
background: transparent;
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark code {
|
.chat .user-message .text,
|
||||||
background-color: #0e1321 !important;
|
.chat .assistant-message .text {
|
||||||
|
max-width: 724px;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Create space between two assistant messages in a row */
|
||||||
|
.assistant-message + .assistant-message {
|
||||||
|
margin-top: 1.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre > code {
|
||||||
|
background-color: #f3f4f6 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark pre > code {
|
||||||
|
background-color: #1f2937 !important;
|
||||||
}
|
}
|
||||||
|
|
@ -1,33 +1,33 @@
|
||||||
.container {
|
.readable-container {
|
||||||
max-width: 600px;
|
max-width: 600px;
|
||||||
margin-left: auto;
|
margin-left: auto;
|
||||||
margin-right: auto;
|
margin-right: auto;
|
||||||
background-color: rgb(31, 41, 55);
|
background-color: rgb(31 41 55);
|
||||||
padding: 3em;
|
padding: 3em;
|
||||||
word-break: break-word;
|
word-break: break-word;
|
||||||
overflow-wrap: anywhere;
|
overflow-wrap: anywhere;
|
||||||
color: #efefef !important;
|
color: #efefef !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.container p, .container li {
|
.readable-container p, .readable-container li {
|
||||||
font-size: 16px !important;
|
font-size: 16px !important;
|
||||||
color: #efefef !important;
|
color: #efefef !important;
|
||||||
margin-bottom: 22px;
|
margin-bottom: 22px;
|
||||||
line-height: 1.4 !important;
|
line-height: 1.4 !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.container li > p {
|
.readable-container li > p {
|
||||||
display: inline !important;
|
display: inline !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.container code {
|
.readable-container code {
|
||||||
overflow-x: auto;
|
overflow-x: auto;
|
||||||
}
|
}
|
||||||
|
|
||||||
.container :not(pre) > code {
|
.readable-container :not(pre) > code {
|
||||||
white-space: normal !important;
|
white-space: normal !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
.container .hoverable {
|
.readable-container .hoverable {
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
}
|
}
|
||||||
BIN
css/katex/fonts/KaTeX_AMS-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_AMS-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_AMS-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_AMS-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_AMS-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_AMS-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Caligraphic-Bold.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Caligraphic-Bold.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Caligraphic-Bold.woff
Normal file
BIN
css/katex/fonts/KaTeX_Caligraphic-Bold.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Caligraphic-Bold.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Caligraphic-Bold.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Caligraphic-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Caligraphic-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Caligraphic-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Caligraphic-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Caligraphic-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Caligraphic-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Fraktur-Bold.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Fraktur-Bold.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Fraktur-Bold.woff
Normal file
BIN
css/katex/fonts/KaTeX_Fraktur-Bold.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Fraktur-Bold.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Fraktur-Bold.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Fraktur-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Fraktur-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Fraktur-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Fraktur-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Fraktur-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Fraktur-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Bold.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Main-Bold.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Bold.woff
Normal file
BIN
css/katex/fonts/KaTeX_Main-Bold.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Bold.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Main-Bold.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-BoldItalic.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Main-BoldItalic.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-BoldItalic.woff
Normal file
BIN
css/katex/fonts/KaTeX_Main-BoldItalic.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-BoldItalic.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Main-BoldItalic.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Italic.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Main-Italic.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Italic.woff
Normal file
BIN
css/katex/fonts/KaTeX_Main-Italic.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Italic.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Main-Italic.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Main-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Main-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Main-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Main-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Math-BoldItalic.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Math-BoldItalic.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Math-BoldItalic.woff
Normal file
BIN
css/katex/fonts/KaTeX_Math-BoldItalic.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Math-BoldItalic.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Math-BoldItalic.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Math-Italic.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Math-Italic.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Math-Italic.woff
Normal file
BIN
css/katex/fonts/KaTeX_Math-Italic.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Math-Italic.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Math-Italic.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Bold.ttf
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Bold.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Bold.woff
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Bold.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Bold.woff2
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Bold.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Italic.ttf
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Italic.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Italic.woff
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Italic.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Italic.woff2
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Italic.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_SansSerif-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_SansSerif-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Script-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Script-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Script-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Script-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Script-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Script-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size1-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Size1-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size1-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Size1-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size1-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Size1-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size2-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Size2-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size2-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Size2-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size2-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Size2-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size3-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Size3-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size3-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Size3-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size3-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Size3-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size4-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Size4-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size4-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Size4-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Size4-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Size4-Regular.woff2
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Typewriter-Regular.ttf
Normal file
BIN
css/katex/fonts/KaTeX_Typewriter-Regular.ttf
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Typewriter-Regular.woff
Normal file
BIN
css/katex/fonts/KaTeX_Typewriter-Regular.woff
Normal file
Binary file not shown.
BIN
css/katex/fonts/KaTeX_Typewriter-Regular.woff2
Normal file
BIN
css/katex/fonts/KaTeX_Typewriter-Regular.woff2
Normal file
Binary file not shown.
1
css/katex/katex.min.css
vendored
Normal file
1
css/katex/katex.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
1671
css/main.css
1671
css/main.css
File diff suppressed because it is too large
Load diff
|
|
@ -1,9 +1,3 @@
|
||||||
.env
|
.env
|
||||||
Dockerfile
|
Dockerfile
|
||||||
/characters
|
/user_data
|
||||||
/loras
|
|
||||||
/models
|
|
||||||
/presets
|
|
||||||
/prompts
|
|
||||||
/softprompts
|
|
||||||
/training
|
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue