Commit 636bca1
Parent(s):
Initial commit
This view is limited to 50 files because it contains too many changes. See the raw diff for the complete change set.
- .gitattributes +37 -0
- LICENSE +163 -0
- README.md +541 -0
- assets/K-EXAONE_Symbol_3d.png +3 -0
- chat_template.jinja +159 -0
- config.json +143 -0
- examples/example_output_search.txt +173 -0
- generation_config.json +10 -0
- model-00001-of-00096.safetensors +3 -0
- model-00002-of-00096.safetensors +3 -0
- model-00003-of-00096.safetensors +3 -0
- model-00004-of-00096.safetensors +3 -0
- model-00005-of-00096.safetensors +3 -0
- model-00006-of-00096.safetensors +3 -0
- model-00007-of-00096.safetensors +3 -0
- model-00008-of-00096.safetensors +3 -0
- model-00009-of-00096.safetensors +3 -0
- model-00010-of-00096.safetensors +3 -0
- model-00011-of-00096.safetensors +3 -0
- model-00012-of-00096.safetensors +3 -0
- model-00013-of-00096.safetensors +3 -0
- model-00014-of-00096.safetensors +3 -0
- model-00015-of-00096.safetensors +3 -0
- model-00016-of-00096.safetensors +3 -0
- model-00017-of-00096.safetensors +3 -0
- model-00018-of-00096.safetensors +3 -0
- model-00019-of-00096.safetensors +3 -0
- model-00020-of-00096.safetensors +3 -0
- model-00021-of-00096.safetensors +3 -0
- model-00022-of-00096.safetensors +3 -0
- model-00023-of-00096.safetensors +3 -0
- model-00024-of-00096.safetensors +3 -0
- model-00025-of-00096.safetensors +3 -0
- model-00026-of-00096.safetensors +3 -0
- model-00027-of-00096.safetensors +3 -0
- model-00028-of-00096.safetensors +3 -0
- model-00029-of-00096.safetensors +3 -0
- model-00030-of-00096.safetensors +3 -0
- model-00031-of-00096.safetensors +3 -0
- model-00032-of-00096.safetensors +3 -0
- model-00033-of-00096.safetensors +3 -0
- model-00034-of-00096.safetensors +3 -0
- model-00035-of-00096.safetensors +3 -0
- model-00036-of-00096.safetensors +3 -0
- model-00037-of-00096.safetensors +3 -0
- model-00038-of-00096.safetensors +3 -0
- model-00039-of-00096.safetensors +3 -0
- model-00040-of-00096.safetensors +3 -0
- model-00041-of-00096.safetensors +3 -0
- model-00042-of-00096.safetensors +3 -0
.gitattributes
ADDED
@@ -0,0 +1,37 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
assets/K-EXAONE_Symbol_3d.png filter=lfs diff=lfs merge=lfs -text
tokenizer.json filter=lfs diff=lfs merge=lfs -text
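The rules above are the standard Hugging Face LFS defaults plus two repository-specific entries (the symbol image and `tokenizer.json`). As a hedged aside, entries like these are typically produced with `git lfs track`; a minimal sketch:

```bash
# Track large binary files with Git LFS so Git stores lightweight pointers instead.
git lfs install
git lfs track "assets/K-EXAONE_Symbol_3d.png"
git lfs track "tokenizer.json"
git add .gitattributes
```
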
LICENSE
ADDED
@@ -0,0 +1,163 @@
K-EXAONE AI Model License Agreement

This License Agreement ("Agreement") is entered into between LG Management Development Institute Co., Ltd. ("Licensor") and you ("User") and governs the use of the K-EXAONE AI Model ("Model"). By downloading, installing, copying, or using the Model, you agree to comply with and be bound by the terms of this Agreement. If you do not agree to all terms, you must not download, install, copy, or use the Model. This Agreement constitutes a binding legal contract between User and Licensor.

1. Definitions
   1.1 "Model" means the artificial intelligence model provided by Licensor, including all software, algorithms, machine learning models, or related components provided by Licensor, together with all updates, improvements, enhancements, bug fixes, patches, or other modifications thereto implemented automatically or manually.
   1.2 "Derivative Work" means any modification, alteration, improvement, enhancement, adaptation, or derivative work of the Model created by User or a third party, including any changes to the Model's architecture, parameters, data processing methods, or any other aspect of the Model that modifies its functionality or output.
   1.3 "Output" means all data, results, content, predictions, analyses, insights, or other materials generated by the Model or Derivative Work, whether in their original form or further processed or modified by User. This includes, but is not limited to, text or numerical data generated directly or indirectly through the use of the Model.
   1.4 "Licensor" means the provider that lawfully offers the K-EXAONE AI Model. Licensor retains all rights to the Model and has the right to grant a license for its use under the terms specified in this Agreement.
   1.5 "User" means an individual, organization, corporation, academic institution, government agency, or other entity that uses or intends to use the Model under the terms of this Agreement. User is responsible for ensuring that all authorized users accessing or using the Model on its behalf comply with this Agreement.

2. License Grant
   2.1 License Grant: Subject to the terms and conditions set forth in this Agreement and Section 2.2, Licensor grants to the User a non-exclusive, non-transferable, worldwide, irrevocable license to access, download, install, modify, use, distribute, and create derivative works of the Model for commercial and non-commercial purposes. In the event the Model or Derivative Work is distributed, this Agreement shall be distributed alongside it to ensure the license terms are maintained, and the name of the Model and Derivative Work shall begin with "K-EXAONE".
   2.2 Distribution, sublicensing, or making the Model and Derivative Work available to third parties for commercial purposes requires a separate agreement with Licensor.

3. Exceptions and Restrictions
   3.1 Reverse Engineering: Except as expressly permitted by applicable law, User shall not attempt to decompile, disassemble, reverse engineer, or derive the source code, underlying ideas, algorithms, or structure of the Model. Any attempts to circumvent or evade any technical protection measures applied to the Model are strictly prohibited.
   3.2 Illegal Use: User shall not use the Model or Derivative Work for any illegal, fraudulent, or unauthorized activities, or for purposes that violate applicable laws or regulations, including but not limited to the creation, distribution, or dissemination of malicious, deceptive, or illegal content.
   3.3 Ethical Use: User shall ensure that the Model or Derivative Work is used ethically and responsibly in compliance with the following guidelines:
      a. Model and Derivative Work must not be used to generate, disseminate, or amplify false, misleading, or harmful information, including fake news, misinformation, or inflammatory content.
      b. Model and Derivative Work must not be used to create, distribute, or promote content that is discriminatory, harassing, defamatory, insulting, or otherwise offensive toward individuals or groups based on race, gender, sexual orientation, religion, nationality, or other protected characteristics.
      c. Model and Derivative Work must not infringe upon the rights of others, including intellectual property rights, privacy rights, or other rights recognized by law. User must obtain all necessary permissions and consents before using the Model and Derivative Work in a manner that could affect the rights of third parties.
      d. Model and Derivative Work must not be used in a manner that causes physical, mental, emotional, or financial harm to any individual, organization, or community. User must take all reasonable measures to prevent the misuse or abuse of the Model and Derivative Work that could result in harm or injury.

4. Ownership
   4.1 Intellectual Property Rights: User acknowledges that this Agreement does not transfer to the User any ownership or patent rights related to the Model or any trademarks, service marks, and logos.
   4.2 Output: Licensor claims no ownership over any output generated by the Model or Derivative Work, and the use of such output is solely the responsibility of User.

5. Warranty
   5.1 Provided "As Is": The Model and Derivative Work are provided "as is," without any warranty or representation of any kind, whether express, implied, or statutory. Licensor disclaims all warranties, including but not limited to implied warranties of merchantability, fitness for a particular purpose, accuracy, reliability, and non-infringement, as well as any warranties arising from course of dealing or trade usage.
   5.2 Performance and Reliability: Licensor does not warrant or guarantee that the Model or Derivative Work will meet User's requirements, that the operation of the Model or Derivative Work will be uninterrupted or error-free, or that defects in the Model will be corrected. User acknowledges that use of the Model or Derivative Work is at their own risk and that the Model or Derivative Work may contain bugs, errors, or other limitations.
   5.3 Warranty Disclaimer: Licensor does not warrant, endorse, or certify any results, conclusions, or recommendations arising from the use of the Model. User bears sole responsibility for evaluating the Model's accuracy, reliability, and suitability for its intended purpose.

6. Limitation of Liability
   6.1 Indemnity for Damages: To the maximum extent permitted by applicable law, Licensor shall not be liable for any special, incidental, indirect, consequential, punitive, or exemplary damages, including the loss of business profits, business interruption, loss of business information, data loss, or any other pecuniary or non-pecuniary loss arising from the use or inability to use the Model, Derivative Work, or Outputs, even if Licensor has been advised of the possibility of such damages.
   6.2 Indemnification: User agrees to indemnify, defend, and hold harmless Licensor, its affiliates, officers, directors, employees, and agents from and against any and all claims, liabilities, damages, losses, costs, or expenses (including reasonable attorneys' fees) arising out of or in connection with User's use of the Model, Derivative Work, or Outputs, including any breach of this Agreement or applicable law.

7. Termination
   7.1 Termination by Licensor: Licensor reserves the right to terminate this Agreement and revoke the User's right to use the Model at any time, with or without cause and without prior notice, if User breaches any term of this Agreement. Termination shall be effective immediately upon notice.
   7.2 Effect of Termination: Upon termination of this Agreement, User shall immediately cease all use of the Model and Derivative Work and destroy all copies of the Model and Derivative Work in the User's possession or control, including any backup or archival copies. User shall provide written proof to Licensor that such destruction has been completed.
   7.3 Survival: The provisions of this Agreement that by their nature should survive termination (including, without limitation, Section 4 (Ownership), Section 5 (Warranty), Section 6 (Limitation of Liability), and this Section 7 (Termination)) shall survive termination.

8. Governing Law
   8.1 Governing Law: This Agreement shall be construed and governed by the laws of the Republic of Korea, without giving effect to its conflict of laws principles.
   8.2 Dispute Resolution: All disputes, controversies, or claims arising out of or in connection with this Agreement, including its existence, validity, interpretation, performance, breach, or termination, shall be finally settled by arbitration administered by the Korea Commercial Arbitration Board (KCAB) in accordance with the KCAB International Arbitration Rules in effect at the time of the commencement of the arbitration. The place of arbitration shall be Seoul, Republic of Korea. The arbitral tribunal shall consist of one (1) arbitrator. The language of the arbitration shall be Korean.

9. Miscellaneous
   9.1 Entire Agreement: This Agreement constitutes the entire agreement between User and Licensor regarding the subject matter hereof and supersedes all prior oral or written agreements, representations, or understandings. Any terms in a purchase order or other document submitted by the User concerning the Model that add to, differ from, or are inconsistent with the terms of this Agreement shall not be binding upon Licensor and shall be null and void.

By downloading, installing, or using the K-EXAONE AI Model, User acknowledges that they have read and understood the terms of this Agreement and agree to be bound by them.
README.md
ADDED
@@ -0,0 +1,541 @@
---
license: other
license_name: k-exaone
license_link: LICENSE
language:
- en
- ko
- es
- de
- ja
- vi
tags:
- lg-ai
- exaone
- k-exaone
pipeline_tag: text-generation
library_name: transformers
---

<br>
<br>
<p align="center">
<img src="assets/K-EXAONE_Symbol_3d.png" width="400">
<br>
<!-- <p align="center"> 🤗 <a href="https://huggingface.co/collections/LGAI-EXAONE/k-exaone">Hugging Face</a>   |   📝 <a href="#"> Blog</a>   |   📑 <a href="#"> Technical Report </a>-->
<br>
<br>

<div align="center">
<a href="https://huggingface.co/collections/LGAI-EXAONE/k-exaone" style="text-decoration: none;">
<img src="https://img.shields.io/badge/🤗-Huggingface-FC926C?style=for-the-badge" alt="Huggingface">
</a>
<a href="#" style="text-decoration: none;">
<img src="https://img.shields.io/badge/📝-Blog_(TBD)-E343BD?style=for-the-badge" alt="Blog">
</a>
<a href="#" style="text-decoration: none;">
<img src="https://img.shields.io/badge/📑-Technical_Report_(TBD)-684CF4?style=for-the-badge" alt="Technical Report">
</a>
</div>

<br>

## Introduction

We introduce **K-EXAONE**, a large-scale multilingual language model developed by LG AI Research. Built on a Mixture-of-Experts architecture, K-EXAONE features **236 billion total** parameters, with **23 billion active** during inference. Performance evaluations across various benchmarks demonstrate that K-EXAONE excels in reasoning, agentic capabilities, general knowledge, multilingual understanding, and long-context processing.

#### Key Features

- **Architecture & Efficiency:** Features a 236B fine-grained MoE design (23B active) optimized with **Multi-Token Prediction (MTP)**, enabling self-speculative decoding that boosts inference throughput by approximately 1.5x.
- **Long-Context Capabilities:** Natively supports a **256K context window**, using a **3:1 hybrid attention** scheme with a **128-token sliding window** to significantly reduce memory usage during long-document processing.
- **Multilingual Support:** Covers 6 languages: Korean, English, Spanish, German, Japanese, and Vietnamese. Features a redesigned **150k vocabulary** with **SuperBPE**, improving token efficiency by ~30%.
- **Agentic Capabilities:** Demonstrates superior tool-use and search capabilities via **multi-agent strategies**.
- **Safety & Ethics:** Aligned with **universal human values**, the model uniquely incorporates **Korean cultural and historical contexts** to address regional sensitivities often overlooked by other models. It demonstrates high reliability across diverse risk categories.

For more details, please refer to the [technical report](#).

### Model Configuration

- Number of Parameters: 236B in total and 23B activated
- Number of Parameters (without embeddings): 234B
- Hidden Dimension: 6,144
- Number of Layers: 48 main layers + 1 MTP layer
- Hybrid Attention Pattern: 12 x (3 Sliding window attention + 1 Global attention)
  - Sliding Window Attention
    - Number of Attention Heads: 64 Q-heads and 8 KV-heads
    - Head Dimension: 128 for both Q/KV
    - Sliding Window Size: 128
  - Global Attention
    - Number of Attention Heads: 64 Q-heads and 8 KV-heads
    - Head Dimension: 128 for both Q/KV
    - No Rotary Positional Embedding Used (NoPE)
- Mixture of Experts:
  - Number of Experts: 128
  - Number of Activated Experts: 8
  - Number of Shared Experts: 1
  - MoE Intermediate Size: 2,048
- Vocab Size: 153,600
- Context Length: 262,144 tokens
- Knowledge Cutoff: Dec 2024 (2024/12)

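The 3:1 hybrid pattern above maps directly onto the per-layer attention types listed in `config.json` (`sliding_window_pattern: "LLLG"`). As a minimal sketch in plain Python (no external dependencies), this expands the pattern over 48 layers and checks the 12 x (3 + 1) structure:

```python
# Expand the "LLLG" hybrid pattern (L = sliding window, G = global) over 48 layers,
# mirroring the "layer_types" field shown later in config.json.
pattern = "LLLG"
num_layers = 48

layer_types = [
    "sliding_attention" if pattern[i % len(pattern)] == "L" else "full_attention"
    for i in range(num_layers)
]

assert layer_types.count("full_attention") == 12      # one global layer per group of four
assert layer_types.count("sliding_attention") == 36   # three sliding-window layers per group
print(layer_types[:4])
# ['sliding_attention', 'sliding_attention', 'sliding_attention', 'full_attention']
```
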
## Evaluation Results

The following table shows the evaluation results of the K-EXAONE model in reasoning mode, compared to our previous model, [EXAONE-4.0](https://github.com/LG-AI-EXAONE/EXAONE-4.0), and other competing models. The evaluation details can be found in the [technical report](#).

| | K-EXAONE (Reasoning) | EXAONE 4.0 (Reasoning) | GPT-OSS (Reasoning: High) | Qwen3-Thinking-2507 | DeepSeek-V3.2 (Reasoning) |
|---|---|---|---|---|---|
| Architecture | MoE | Dense | MoE | MoE | MoE |
| Total Params | 236B | 32B | 117B | 235B | 671B |
| Active Params | 23B | 32B | 5.1B | 22B | 37B |
| *World Knowledge* | | | | | |
| MMLU-Pro | 83.9 | 81.8 | 80.7 | 84.4 | 85.0 |
| GPQA-Diamond | 80.0 | 75.4 | 80.1 | 81.1 | 82.4 |
| Humanity's Last Exam | 13.8 | 10.6 | 14.9 | 18.2 | 25.1 |
| *Math* | | | | | |
| AIME 2025 | 92.6 | 85.3 | 92.5 | 92.3 | 93.1 |
| *Coding* | | | | | |
| LiveCodeBench v6 | 81.1 | 66.7 | 81.9 | 74.1 | 79.4 |
| *Agentic Tool Use* | | | | | |
| τ²-Bench (Telecom) | 71.9 | 23.7 | 60.3 | 45.6 | 85.8 |
| *Instruction Following* | | | | | |
| IFBench | 67.4 | 36.0 | 69.5 | 52.6 | 62.5 |
| IFEval | 89.8 | 84.7 | 89.5 | 87.8 | 92.6 |
| *Long Context Understanding* | | | | | |
| AA-LCR | 53.5 | 14.0 | 50.7 | 67.0 | 65.0 |
| *Korean Knowledge & Math* | | | | | |
| KMMLU-Pro | 67.3 | 67.7 | 62.4 | 71.6 | 72.1 |
| HRM8K | 90.7 | 89.4 | 91.6 | 92.0 | 90.6 |
| CLIcK | 83.9 | 78.8 | 74.6 | 81.3 | 86.3 |
| KoBALT | 61.8 | 25.4 | 54.3 | 56.1 | 62.7 |

## Requirements

Until these libraries officially support K-EXAONE, you need to install our forked versions that include the EXAONE-MoE implementation. We will announce when the upstream libraries are updated to support the K-EXAONE model.

#### Transformers

You can install the latest version of Transformers with support for the EXAONE-MoE architecture from [this repository](https://github.com/Aim-Highest/transformers).
The base version of Transformers is `5.0.0rc1`, so it might be helpful to check [the migration guide](https://github.com/huggingface/transformers/blob/main/MIGRATION_GUIDE_V5.md) from the Transformers library.

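As a minimal sketch of the install step (the exact branch and build options are not specified upstream, so treat the commands below as assumptions rather than the official procedure), installing from the forks typically looks like this:

```bash
# Forked Transformers with the EXAONE-MoE implementation.
pip install git+https://github.com/Aim-Highest/transformers.git

# Serving engines are usually built from a local checkout of the matching fork, e.g. the vLLM fork:
git clone -b add-exaone-moe https://github.com/Aim-Highest/vllm.git
cd vllm && pip install -e .
```
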
#### vLLM

You should install both Transformers and vLLM to use the K-EXAONE model on a vLLM server.
You can install the latest version of vLLM with support for the EXAONE-MoE architecture from [this repository](https://github.com/Aim-Highest/vllm/tree/add-exaone-moe).

#### SGLang

You should install both Transformers and SGLang to use the K-EXAONE model on an SGLang server.
You can install the latest version of SGLang with support for the EXAONE-MoE architecture from [this repository](https://github.com/Aim-Highest/sglang).

#### llama.cpp

You can install the latest version of llama.cpp with support for the EXAONE-MoE architecture from [this repository](https://github.com/Aim-Highest/llama.cpp).
Please refer to the [official build guide](https://github.com/ggml-org/llama.cpp/blob/master/docs/build.md) for details.

## Quickstart

You can use the K-EXAONE model with the Transformers library. For the best output quality, check the [usage guideline](#usage-guideline) section.

### Reasoning mode

For tasks that require accurate results, you can run the K-EXAONE model in reasoning mode as shown below.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "LGAI-EXAONE/K-EXAONE-236B-A23B"

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    dtype="bfloat16",
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained(model_name)

messages = [
    {"role": "system", "content": "You are K-EXAONE, a large language model developed by LG AI Research in South Korea, built to serve as a helpful and reliable assistant."},
    {"role": "user", "content": "Which one is bigger, 3.9 vs 3.12?"}
]
input_ids = tokenizer.apply_chat_template(
    messages,
    tokenize=True,
    add_generation_prompt=True,
    return_tensors="pt",
    enable_thinking=True,  # skippable (default: True)
)

generated_ids = model.generate(
    **input_ids.to(model.device),
    max_new_tokens=16384,
    temperature=1.0,
    top_p=0.95,
)
output_ids = generated_ids[0][input_ids['input_ids'].shape[-1]:]
print(tokenizer.decode(output_ids, skip_special_tokens=True))
```

### Non-reasoning mode

For tasks where latency matters more than accuracy, you can run the K-EXAONE model in non-reasoning mode as shown below.

```python
messages = [
    {"role": "system", "content": "You are K-EXAONE, a large language model developed by LG AI Research in South Korea, built to serve as a helpful and reliable assistant."},
    {"role": "user", "content": "Explain how wonderful you are"}
]
input_ids = tokenizer.apply_chat_template(
    messages,
    tokenize=True,
    add_generation_prompt=True,
    return_tensors="pt",
    enable_thinking=False,
)

generated_ids = model.generate(
    **input_ids.to(model.device),
    max_new_tokens=1024,
    temperature=1.0,
    top_p=0.95,
)
output_ids = generated_ids[0][input_ids['input_ids'].shape[-1]:]
print(tokenizer.decode(output_ids, skip_special_tokens=True))
```

### Agentic tool use

For your AI-powered agent, you can integrate external functions using the OpenAI-style tool description format. Here is an example of creating a tool description from a Python function.

Please check the [example file](examples/example_output_search.txt) for an example of a search agent conversation using K-EXAONE. A sketch of the full tool-calling loop is shown after the code block below.

```python
import random

from transformers.utils import get_json_schema

def roll_dice(max_num: int):
    """
    Roll a dice with the number 1 to N. User can select the number N.

    Args:
        max_num: The maximum number on the dice.
    """
    return random.randint(1, max_num)

tool_schema = get_json_schema(roll_dice)
tools = [tool_schema]

messages = [
    {"role": "system", "content": "You are K-EXAONE, a large language model developed by LG AI Research in South Korea, built to serve as a helpful and reliable assistant."},
    {"role": "user", "content": "Roll a D20 twice and sum the results."}
]
input_ids = tokenizer.apply_chat_template(
    messages,
    tokenize=True,
    add_generation_prompt=True,
    return_tensors="pt",
    tools=tools,
)

generated_ids = model.generate(
    **input_ids.to(model.device),
    max_new_tokens=16384,
    temperature=1.0,
    top_p=0.95,
)
output_ids = generated_ids[0][input_ids['input_ids'].shape[-1]:]
print(tokenizer.decode(output_ids, skip_special_tokens=True))
```

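The snippet above stops after the model emits its first `<tool_call>`. Below is a minimal sketch of the rest of the loop, under the assumption that tool calls come back as `<tool_call>{...}</tool_call>` blocks (as in the example transcript) and that the chat template accepts `tool_calls` and `tool`-role messages as defined in `chat_template.jinja`; the helper name and `max_rounds` limit are hypothetical.

```python
import json
import re

def run_tool_loop(model, tokenizer, messages, tools, available_fns, max_rounds=4):
    """Generate, execute any <tool_call> blocks, feed the results back, and repeat."""
    text = ""
    for _ in range(max_rounds):
        input_ids = tokenizer.apply_chat_template(
            messages, tokenize=True, add_generation_prompt=True,
            return_tensors="pt", tools=tools,
        )
        generated = model.generate(**input_ids.to(model.device),
                                   max_new_tokens=16384, temperature=1.0, top_p=0.95)
        text = tokenizer.decode(generated[0][input_ids["input_ids"].shape[-1]:],
                                skip_special_tokens=True)

        calls = re.findall(r"<tool_call>(.*?)</tool_call>", text, flags=re.DOTALL)
        if not calls:
            return text  # no more tool calls: this is the final answer

        # Record the assistant turn (content without the raw tool-call spans),
        # then append one tool-result turn per call.
        content = re.sub(r"<tool_call>.*?</tool_call>", "", text, flags=re.DOTALL).strip()
        messages.append({
            "role": "assistant",
            "content": content,
            "tool_calls": [{"function": json.loads(c)} for c in calls],
        })
        for c in calls:
            call = json.loads(c)
            result = available_fns[call["name"]](**call["arguments"])
            messages.append({"role": "tool", "content": json.dumps(result)})
    return text

# Hypothetical usage with the roll_dice example above:
# final_answer = run_tool_loop(model, tokenizer, messages, tools, {"roll_dice": roll_dice})
```
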
## Usage Guideline

> [!IMPORTANT]
> To achieve the expected performance, we recommend using the following configurations:
> - We strongly recommend `temperature=1.0`, `top_p=0.95`, and `presence_penalty=0.0` for the best performance.
> - Unlike EXAONE 4.0, K-EXAONE uses `enable_thinking=True` by default, so you need to set `enable_thinking=False` when you want to use non-reasoning mode.
>

## Deployment

### TensorRT-LLM

TensorRT-LLM support for the K-EXAONE model is being prepared. Please refer to the [EXAONE-MoE PR](https://github.com/NVIDIA/TensorRT-LLM/pull/10355) in the TensorRT-LLM repository for details.

### vLLM

We support the K-EXAONE model on vLLM. You need to install our fork of the vLLM library to use the K-EXAONE model; please check the [requirements](#requirements) section.
Practically, you can serve the model with a 256K context length using tensor parallelism on 4 H200 GPUs.

After you install the vLLM library with the EXAONE-MoE implementation, you can run the vLLM server with the following command:

```bash
vllm serve LGAI-EXAONE/K-EXAONE-236B-A23B \
    --reasoning-parser deepseek_v3 \
    --enable-auto-tool-choice \
    --tool-call-parser hermes
```

An OpenAI-compatible API server will be available at http://localhost:8000/v1.

You can test the vLLM server by sending a chat completion request as shown below.

```bash
curl -X POST http://localhost:8000/v1/chat/completions \
    -H "Content-Type: application/json" \
    -d '{
        "model": "LGAI-EXAONE/K-EXAONE-236B-A23B",
        "messages": [
            {"role": "user", "content": "How many r'\''s in \"strawberry\"?"}
        ],
        "max_tokens": 16384,
        "temperature": 1.0,
        "top_p": 0.95,
        "chat_template_kwargs": {"enable_thinking": true}
    }'
```

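Since the endpoint is OpenAI-compatible, you can also query it with the official `openai` Python client. A minimal sketch (the `api_key` value is arbitrary for a local server, and `extra_body` carries the same `chat_template_kwargs` as the curl example):

```python
from openai import OpenAI

# Point the client at the local vLLM server started above.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

response = client.chat.completions.create(
    model="LGAI-EXAONE/K-EXAONE-236B-A23B",
    messages=[{"role": "user", "content": "How many r's in \"strawberry\"?"}],
    max_tokens=16384,
    temperature=1.0,
    top_p=0.95,
    extra_body={"chat_template_kwargs": {"enable_thinking": True}},
)
print(response.choices[0].message.content)
```
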
If you are interested in using the MTP weights for speculative decoding, add the corresponding options as shown below.

```bash
vllm serve LGAI-EXAONE/K-EXAONE-236B-A23B \
    --reasoning-parser deepseek_v3 \
    --enable-auto-tool-choice \
    --tool-call-parser hermes \
    --no-enable-prefix-caching \
    --speculative_config '{
        "method": "mtp",
        "num_speculative_tokens": 2
    }'
```

### SGLang

We support the K-EXAONE model on SGLang. You need to install our fork of the SGLang library to use the K-EXAONE model; please check the [requirements](#requirements) section.
Practically, you can serve the model with a 256K context length using tensor parallelism on 4 H200 GPUs.

```bash
python -m sglang.launch_server \
    --model LGAI-EXAONE/K-EXAONE-236B-A23B \
    --reasoning-parser qwen3 \
    --disable-hybrid-swa-memory
```

An SGLang server will be available at http://localhost:30000.

> [!NOTE]
> Currently, the OpenAI-compatible server is incompatible with `transformers>=5.0.0rc0`, so you need to use the SGLang native API for now.
> For the native API, please refer to the [official documentation](https://docs.sglang.io/basic_usage/native_api.html).
>
> Once the issue is resolved, we will update this section accordingly.

You can test the SGLang server by sending a request as shown below.

```python
from transformers import AutoTokenizer
import requests

model_name = "LGAI-EXAONE/K-EXAONE-236B-A23B"
tokenizer = AutoTokenizer.from_pretrained(model_name)

messages = [
    {"role": "user", "content": "How many r's in \"strawberry\"?"}
]
input_text = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)

response = requests.post(
    "http://localhost:30000/generate",
    json={
        "text": input_text,
        "sampling_params": {
            "temperature": 1.0,
            "top_p": 0.95,
            "max_new_tokens": 16384,
        },
    },
)
print(response.json()['text'])
```

If you are interested in using the MTP weights for speculative decoding, add the corresponding options as shown below.

```bash
python -m sglang.launch_server \
    --model LGAI-EXAONE/K-EXAONE-236B-A23B \
    --reasoning-parser qwen3 \
    --disable-hybrid-swa-memory \
    --speculative-algorithm EAGLE \
    --speculative-num-steps 3 \
    --speculative-eagle-topk 1 \
    --speculative-num-draft-tokens 4
```

## Limitation

The K-EXAONE language model has certain limitations and may occasionally generate inappropriate responses. The language model generates responses based on the output probability of tokens, which is learned from the training data. While we have made every effort to exclude personal, harmful, and biased information from the training data, some problematic content may still be included, potentially leading to undesirable responses. Please note that the text generated by the K-EXAONE language model does not reflect the views of LG AI Research.

- Inappropriate answers may be generated, which contain personal, harmful or other inappropriate information.
- Biased responses may be generated, which are associated with age, gender, race, and so on.
- The generated responses rely heavily on statistics from the training data, which can result in the generation of semantically or syntactically incorrect sentences.
- Since the model does not reflect the latest information, the responses may be false or contradictory.

LG AI Research strives to reduce potential risks that may arise from K-EXAONE language models. Users are not allowed to engage in any malicious activities (e.g., keying in illegal information) that may induce the creation of inappropriate outputs violating LG AI's ethical principles when using K-EXAONE language models.

## License

The model is licensed under the [K-EXAONE AI Model License Agreement](./LICENSE).

## Citation

```
@article{k-exaone,
  title={K-EXAONE Technical Report},
  author={{LG AI Research}},
  journal={arXiv preprint arXiv:TBD},
  year={2025}
}
```

## Contact

LG AI Research Technical Support: contact_us@lgresearch.ai

assets/K-EXAONE_Symbol_3d.png
ADDED
Git LFS Details
chat_template.jinja
ADDED
@@ -0,0 +1,159 @@
{% set image_count = namespace(value=0) %}
{% set video_count = namespace(value=0) %}

{%- set role_indicators = {
'user': '<|user|>\n',
'assistant': '<|assistant|>\n',
'system': '<|system|>\n',
'tool': '<|tool|>\n',
'tool_declare': '<|tool_declare|>\n'
} %}
{%- set end_of_turn = '<|endofturn|>\n' %}


{%- macro declare_available_tools(tools) %}
{{- "# Tools" }}
{{- "\n" }}
{%- for tool in tools %}
{{- "<tool>" }}
{{- tool | tojson(ensure_ascii=False) | safe }}
{{- "</tool>\n" }}
{%- endfor %}
{%- endmacro %}


{%- set ns = namespace(last_query_index = messages|length - 1, last_query_index_not_yet_determined = true) %}
{%- for message in messages[::-1] %}
{%- set index = (messages|length - 1) - loop.index0 %}
{%- if ns.last_query_index_not_yet_determined and message.role == "user" and message.content is string %}
{%- set ns.last_query_index = index -%}
{%- set ns.last_query_index_not_yet_determined = false -%}
{%- endif %}
{%- endfor %}

{%- if tools is defined and tools %}
{{- role_indicators['tool_declare'] }}
{{- declare_available_tools(tools) }}
{{- end_of_turn -}}
{%- endif %}

{%- for i in range(messages | length) %}
{%- set msg = messages[i] %}
{%- set role = msg.role %}
{%- if role not in role_indicators %}
{{- raise_exception('Unknown role: ' ~ role) }}
{%- endif %}

{%- if i == 0 %}
{%- if role == 'system' %}
{{- role_indicators['system'] }}
{{- msg.content }}
{{- end_of_turn -}}
{%- continue %}
{%- endif %}
{%- endif %}

{%- if role == 'assistant' %}
{{- role_indicators['assistant'] }}

{%- set content = (msg.content if (msg.content is defined and msg.content) else "") -%}
{%- set reasoning = none -%}

{%- if msg.reasoning_content is defined and msg.reasoning_content%}
{%- set reasoning = msg.reasoning_content.strip() -%}
{%- elif content and "</think>" in content %}
{%- set _parts = content.split('</think>') -%}
{%- set reasoning = _parts[0].lstrip('<think>').strip() -%}
{%- set content = _parts[-1].strip() -%}
{%- endif %}

{%- if not (reasoning and i > ns.last_query_index) or (skip_think is defined and skip_think) %}
{%- set reasoning = none %}
{%- endif %}

{%- set content = content.strip() -%}

{{- "<think>\n" }}
{{- (reasoning if reasoning is not none else "") }}
{{- "\n</think>\n\n" }}

{{- content }}

{%- if msg.tool_calls %}
{%- if content is defined and content %}
{{- "\n" }}
{%- endif %}
{%- for tool_call in msg.tool_calls %}
{%- if tool_call.function is defined %}
{%- set tool_call = tool_call.function %}
{%- endif %}

{%- if tool_call.arguments is defined %}
{%- set arguments = tool_call.arguments %}
{%- elif tool_call.parameters is defined %}
{%- set arguments = tool_call.parameters %}
{%- else %}
{{- raise_exception('arguments or parameters are mandatory: ' ~ tool_call) }}
{%- endif %}
{%- if arguments is string %}
{{- "<tool_call>" }}{"name": "{{- tool_call.name }}", "arguments": {{ arguments }}}{{- "</tool_call>" }}
{%- else %}
{{- "<tool_call>" }}{"name": "{{- tool_call.name }}", "arguments": {{ arguments | tojson(ensure_ascii=False) | safe }}}{{- "</tool_call>" }}
{%- endif %}
{%- if not loop.last %}
{{- "\n" }}
{%- endif %}

{%- endfor %}
{%- endif %}
{{- end_of_turn -}}

{%- elif role == "tool" %}
{%- if i == 0 or messages[i - 1].role != "tool" %}
{{- role_indicators['tool'] }}
{%- endif %}
{%- if msg.content is defined %}
{%- if msg.content is string %}
{{- "<tool_result>" }}{{ msg.content }}{{- "</tool_result>" }}
{%- else %}
{{- "<tool_result>" }}{{ msg.content | tojson(ensure_ascii=False) | safe }}{{- "</tool_result>" }}
{%- endif %}
{%- endif %}
{%- if loop.last or messages[i + 1].role != "tool" %}
{{- end_of_turn -}}
{%- else %}
{{- "\n" }}
{%- endif %}

{%- else %}
{{- role_indicators[role] }}
{%- if msg.content is string %}
{{- msg.content }}
{%- else %}
{%- for content in msg.content %}
{%- if content.type == 'image' %}
{%- set image_count.value = image_count.value + 1 %}
{%- if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<vision><image_pad></vision>
{%- elif content.type == 'video' %}
{%- set video_count.value = video_count.value + 1 %}
{%- if add_vision_id %}Video {{ video_count.value }}: {% endif %}<vision><video_pad></vision>
{%- elif content.type == 'text' %}
{{- content.text }}
{%- else %}
{{- content.text }}
{%- endif %}
{%- endfor %}
{%- endif %}
{{- end_of_turn -}}
{%- endif %}
{% endfor %}


{%- if add_generation_prompt %}
{{- role_indicators['assistant'] }}
{%- if enable_thinking is not defined or enable_thinking is true %}
{{- "<think>\n" }}
{%- else %}
{{- "<think>\n\n</think>\n\n" }}
{%- endif %}
{%- endif %}
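As a quick way to see what this template produces, the sketch below renders a short tool-use exchange offline (it only needs the tokenizer files; the message fields follow the structure the template expects, and the rendered markers match those in `examples/example_output_search.txt`). The query and tool-result strings are illustrative only.

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/K-EXAONE-236B-A23B")

tools = [{"type": "function", "function": {
    "name": "search",
    "description": "Discovery search focusing on diverse online sources.",
    "parameters": {"type": "object",
                   "properties": {"query": {"type": "string"}},
                   "required": ["query"]},
}}]

messages = [
    {"role": "system", "content": "You're a helpful assistant."},
    {"role": "user", "content": "Who wrote the Wellness Trends Report 2025?"},
    {"role": "assistant", "content": "",
     "tool_calls": [{"function": {"name": "search",
                                  "arguments": {"query": "Wellness Trends Report 2025 author"}}}]},
    {"role": "tool", "content": "Forbes article by Angelina Villa-Clarke."},
]

# Renders the <|tool_declare|>, <|system|>, <|user|>, <tool_call>, and <tool_result> turns,
# and ends with "<|assistant|>\n<think>\n" because thinking is enabled by default.
prompt = tokenizer.apply_chat_template(messages, tools=tools, tokenize=False,
                                       add_generation_prompt=True)
print(prompt)
```
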
config.json
ADDED
@@ -0,0 +1,143 @@
{
  "architectures": [
    "ExaoneMoEForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "dtype": "bfloat16",
  "eos_token_id": 53,
  "first_last_k_dense_replace": 1,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 6144,
  "initializer_range": 0.02,
  "intermediate_size": 18432,
  "is_moe_layer": [
    false, true, true, true, true, true, true, true, true, true, true, true,
    true, true, true, true, true, true, true, true, true, true, true, true,
    true, true, true, true, true, true, true, true, true, true, true, true,
    true, true, true, true, true, true, true, true, true, true, true, true
  ],
  "layer_types": [
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention",
    "sliding_attention", "sliding_attention", "sliding_attention", "full_attention"
  ],
  "max_position_embeddings": 262144,
  "model_type": "exaone_moe",
  "moe_intermediate_size": 2048,
  "n_group": 1,
  "norm_topk_prob": true,
  "num_attention_heads": 64,
  "num_experts": 128,
  "num_experts_per_tok": 8,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_shared_experts": 1,
  "pad_token_id": 0,
  "rms_norm_eps": 1e-05,
  "rope_parameters": {
    "rope_theta": 1000000,
    "rope_type": "default"
  },
  "routed_scaling_factor": 2.5,
  "scoring_func": "sigmoid",
  "sliding_window": 128,
  "sliding_window_pattern": "LLLG",
  "tie_word_embeddings": false,
  "tokenizer_class": "GPT2Tokenizer",
  "topk_group": 1,
  "topk_method": "noaux_tc",
  "transformers_version": "5.0.0.dev0",
  "use_cache": true,
  "vocab_size": 153600
}
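A small, dependency-free sanity check of the fields above can be handy after downloading the repository; the local path below is a placeholder, not part of the release.

```python
import json

# Placeholder path to a local snapshot of the repository.
with open("K-EXAONE-236B-A23B/config.json") as f:
    cfg = json.load(f)

n_layers = cfg["num_hidden_layers"]
assert len(cfg["layer_types"]) == n_layers == 48
assert cfg["layer_types"].count("full_attention") == 12          # 3:1 hybrid pattern
assert sum(cfg["is_moe_layer"]) == n_layers - cfg["first_last_k_dense_replace"]
print(cfg["num_experts"], "experts,", cfg["num_experts_per_tok"], "active per token,",
      cfg["num_shared_experts"], "shared")
```
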
examples/example_output_search.txt
ADDED
@@ -0,0 +1,173 @@
| 1 |
+
<|tool_declare|>
|
| 2 |
+
# Tools
|
| 3 |
+
<tool>{"type": "function", "function": {"type": "function", "name": "search", "description": "Discovery search focusing on diverse online sources.", "parameters": {"type": "object", "properties": {"query": {"type": "string", "description": "Search query to send to Search API."}}, "required": ["query"]}}}</tool>
|
| 4 |
+
<tool>{"type": "function", "function": {"type": "function", "name": "open", "description": "Open a specific website by providing its URL and retrieve a concise summary of the website's content.", "parameters": {"type": "object", "properties": {"url": {"type": "string", "description": "The target website URL or domain"}, "pattern": {"type": "string", "description": "The target pattern of the webpage content to be summarized."}}, "required": ["url", "pattern"]}}}</tool>
|
| 5 |
+
<|endofturn|>
|
| 6 |
+
<|system|>
|
| 7 |
+
You're a helpful assistant.<|endofturn|>
|
| 8 |
+
<|user|>
|
| 9 |
+
According to the 'Wellness Trends Report 2025: How Wellness Got Smarter', what do the Preferred Hotels & Resorts’ Travel Trends predictions for 2025 indicate about longevity wellness?<|endofturn|>
|
| 10 |
+
<|assistant|>
|
| 11 |
+
<think>
|
| 12 |
+
The user is asking about a specific report: "Wellness Trends Report 2025: How Wellness Got Smarter" and specifically about Preferred Hotels & Resorts' Travel Trends predictions for 2025 regarding longevity wellness.
|
| 13 |
+
|
| 14 |
+
I need to search for this report. Let's search for the report title.
|
| 15 |
+
</think>
|
| 16 |
+
|
| 17 |
+
<tool_call>{"name": "search", "arguments": {"query": "\"Wellness Trends Report 2025: How Wellness Got Smarter\" Preferred Hotels & Resorts Travel Trends predictions 2025 longevity wellness"}}</tool_call><|endofturn|>
|
| 18 |
+
<|tool|>
|
| 19 |
+
<tool_result>[{"title": "Wellness Trends Report 2025: How Wellness Got Smarter", "url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "description": "Wellness Trends Report 2025: How Wellness Got Smarter. In this second ... Longevity is also a trend highlighted in Preferred Hotels & Resorts' ..."}]</tool_result><|endofturn|>
|
| 20 |
+
<|assistant|>
|
| 21 |
+
<think>
|
| 22 |
+
The search result shows a Forbes article with that title. Let's open it.
|
| 23 |
+
</think>
|
| 24 |
+
|
| 25 |
+
<tool_call>{"name": "open", "arguments": {"url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "pattern": "Preferred Hotels & Resorts Travel Trends predictions 2025 longevity wellness"}}</tool_call><|endofturn|>
|
| 26 |
+
<|tool|>
|
| 27 |
+
<tool_result>Summary:
|
| 28 |
+
The Forbes Wellness Trends Report 2025 by Angelina Villa-Clarke, published February 4, 2025, centers on smarter, tech-enabled longevity wellness and the integration of advanced science with traditional practices. It notes that longevity wellness is at the forefront of Preferred Hotels & Resorts’ Travel Trends predictions for 2025, highlighting biohacking, high-tech recovery, and brain-health initiatives across luxury properties. Examples include ZEM Wellness Clinic Altea in Spain (opened last month) with a ten-acre site featuring 95 suites and a 4,000 sqm spa, plus Europe’s first Metawell Centre; Alohilani Resort Waikiki Beach’s Fit Waikiki and Drip Hawaii IV lounge; and longevity-focused offerings at Lefay Resort & Spa Dolomiti and The Alpina Gstaad. The report also covers Accor’s Wellness Trends 2025, which stresses science-based, transparent wellness and a holistic approach. Additionally, brain fitness emerges as a major theme, with investments in brain-health services, nootropics, and tech-driven cognitive enhancement.
|
| 29 |
+
|
| 30 |
+
Answer to the question:
|
| 31 |
+
Preferred Hotels & Resorts Travel Trends predictions for 2025 identify longevity wellness as a leading trend, combining advanced science and technology with traditional wellness practices to support recovery, healthy aging, and mental well-being. The article provides concrete examples of how this trend is being enacted across luxury properties and destinations.
|
| 32 |
+
|
| 33 |
+
In concrete terms, ZEM Wellness Clinic Altea in Spain opened recently (the article states it “opened last month”) and is described as a pioneering medical clinic offering a blend of cutting-edge technology and traditional wellness. It sits on ten acres and includes 95 suites and a 4,000 square meter spa, with a Recovery Unit featuring cryotherapy, hyperbaric oxygen, and pressotherapy, and it showcases Europe’s first Metawell Centre developed by the Gharieni Group to deliver anti-inflammatory effects, improved sleep, detox and lymph drainage, and enhanced recovery.
|
| 34 |
+
|
| 35 |
+
The piece also highlights ongoing or upcoming longevity-focused experiences at other properties: Lefay Resort & Spa Dolomiti in Italy and The Alpina Gstaad in Switzerland, both part of the broader longevity-wellness menu; SIRO One Za’abeel in Dubai (focusing on hi-tech wellness) and a second SIRO property, SIRO Boka Palace in Montenegro, opening in May 2025 with a Destination Fitness pillar offering outdoor activities and recovery labs. In Waikiki, Alohilani Resort’s Fit Waikiki and the Drip Hawaii IV lounge illustrate hotel-based longevity and recovery services.
|
| 36 |
+
|
| 37 |
+
Brain fitness is underscored as a major trend within longevity wellness. Accor’s Wellness Trends report frames brain health as a rising priority in a longer-living population, with four market directions: brain supplements, nootropic-infused drinks, high-tech brain devices, and personalized brainwave programs. Viavi’s brain-health service, developed with Dr. Sabine Donnai, offers a comprehensive brain evaluation and personal health strategy for £12,000, plus a pTau blood test available for £1,000. The article also notes that brain-health initiatives are being integrated into wellness offerings more broadly, including services spearheaded by practitioners like Dr. Donnai and Viavi, in the context of a broader move toward science-backed, transparent wellness. Accor’s Emlyn Brown is quoted describing wellness in 2025 as grounded in science, authenticity, technology, trust, and sustainable, whole-self wellbeing.</tool_result><|endofturn|>
|
| 38 |
+
<|assistant|>
|
| 39 |
+
<think>
|
| 40 |
+
The summary provides a detailed answer. However, the user's question is: "According to the 'Wellness Trends Report 2025: How Wellness Got Smarter', what do the Preferred Hotels & Resorts’ Travel Trends predictions for 2025 indicate about longevity wellness?"
|
| 41 |
+
|
| 42 |
+
The summary indicates that Preferred Hotels & Resorts' Travel Trends predictions for 2025 identify longevity wellness as a leading trend, combining advanced science and technology with traditional wellness practices to support recovery, healthy aging, and mental well-being. It mentions biohacking, high-tech recovery, and brain-health initiatives across luxury properties.
|
| 43 |
+
|
| 44 |
+
But perhaps the user wants a more concise answer. Let's see if we can extract the exact phrasing from the article. Let's open the article again with a more specific pattern.
|
| 45 |
+
</think>
|
| 46 |
+
|
| 47 |
+
<tool_call>{"name": "open", "arguments": {"url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "pattern": "Preferred Hotels & Resorts"}}</tool_call><|endofturn|>
|
| 48 |
+
<|tool|>
|
| 49 |
+
<tool_result>Summary:
|
| 50 |
+
The Wellness Trends Report 2025: How Wellness Got Smarter (Forbes, Feb 4, 2025) discusses high-tech, science-informed wellness and longevity therapies. It notes that longevity wellness is a central focus in Preferred Hotels & Resorts’ Travel Trends predictions for 2025. The article cites several Preferred Hotels properties as examples, including Alohilani Resort Waikiki Beach (Honolulu) with a reimagined Fit Waikiki wellness center and the Drip Hawaii IV lounge, Lefay Resort & Spa Dolomiti (Italy), and The Alpina Gstaad (Switzerland). It describes these properties as integrating advanced wellness offerings—such as cryotherapy, oxygen therapy, and advanced skin treatments—with luxurious accommodations to support health, longevity, and well-being. The piece positions brain fitness as an emerging wellness priority and situates these initiatives within the broader trend toward tech-enabled, authentic wellness experiences promoted by Preferred Hotels & Resorts.
|
| 51 |
+
|
| 52 |
+
Answer to the question:
|
| 53 |
+
What does the page say about Preferred Hotels & Resorts?
|
| 54 |
+
|
| 55 |
+
- The article identifies longevity wellness as a leading trend in Preferred Hotels & Resorts’ Travel Trends predictions for 2025. It quotes that longevity wellness combines advanced science and technology with traditional practices to support recovery, healthy aging, and mental well-being.
|
| 56 |
+
- It provides concrete examples of Preferred Hotels properties embracing longevity wellness:
|
| 57 |
+
- Alohilani Resort Waikiki Beach in Honolulu, which has reimagined its wellness center (Fit Waikiki) for enhanced experiences and offers cutting-edge therapies, including Waikiki’s first IV lounge at Drip Hawaii medical spa.
|
| 58 |
+
- Lefay Resort & Spa Dolomiti in Italy, described as a wellness hotel that is part of Preferred Hotels & Resorts.
|
| 59 |
+
- The Alpina Gstaad in Switzerland, also listed as a Preferred Hotels & Resorts property embracing longevity wellness.
|
| 60 |
+
- The article notes that these integrative wellness offerings, along with luxurious accommodations, are becoming a go-to option for guests seeking to enhance health, longevity, and overall quality of life.</tool_result><|endofturn|>
|
| 61 |
+
<|assistant|>
|
| 62 |
+
<think>
|
| 63 |
+
The summary gives a good answer. However, the user might want a direct quote or specific indication. Let's search for the phrase "Preferred Hotels & Resorts’ Travel Trends predictions for 2025 indicate" or similar.
|
| 64 |
+
</think>
|
| 65 |
+
|
| 66 |
+
<tool_call>{"name": "search", "arguments": {"query": "\"Preferred Hotels & Resorts\" \"Travel Trends\" 2025 longevity wellness"}}</tool_call><|endofturn|>
|
| 67 |
+
<|tool|>
|
| 68 |
+
<tool_result>[{"title": "Preferred Hotels & Resorts Unveils 2025 Travel Trends", "url": "https://www.luxurytravelmagazine.com/news-articles/preferred-hotels-resorts-unveils-2025-travel-trends", "description": "Preferred Hotels & Resorts, the world's largest independent hotel brand, has unveiled the top travel trends shaping the year ahead."}, {"title": "Wellness Trends Report 2025: How Wellness Got Smarter", "url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "description": "Longevity is also a trend highlighted in Preferred Hotels & Resorts' Travel Trends predictions for 2025. “Longevity wellness is at the forefront ..."}, {"title": "ILTM Cannes 2025: The Luxury Travel Trends That ...", "url": "https://www.hertelier.com/post/iltm-cannes-2025", "description": "ILTM Cannes 2025: The Luxury Travel Trends That Actually Matter ... Preferred Hotels & Resorts is leaning into this shift. CEO Lindsey ..."}, {"title": "Virtuoso, Globetrender flag luxury travel trends to watch", "url": "https://latteluxurynews.com/2024/10/03/virtuoso-globetrender-luxury-travel-trends/", "description": "Virtuoso and Globetrender have complied a list of luxury travel trends to watch in 2025 ... Preferred Hotels & Resorts adds eight hotels ..."}, {"title": "2025 Hot New Hotel Preview: Part 3 - by Pavia + Jeralyn", "url": "https://fathomwaytogo.substack.com/p/new-hotels-2025-europe", "description": "As we already reported in our 2025 travel trends report, hotels are merging into trains and vice versa. ... Part of Preferred Hotels & Resorts' ..."}, {"title": "From Seaside Sanctuaries to Cool Culture Capitals", "url": "https://www.hospitalitynet.org/news/4124170.html", "description": "Preferred Hotels & Resorts Welcomes 14 ... Travel Trends through AI Innovation, Sustainability and New Consumer Insights at WTM London 2025."}, {"title": "Six Senses Unveils Calendar of 2024 Wellness Retreats", "url": "https://www.travelagewest.com/Travel/Hotels/six-senses-retreats", "description": "... Travel Trends for 2026. From Our Partners. More Stories Like This. The ... Preferred Hotels & Resorts Grows Global Portfolio · Blue Diamond ..."}, {"title": "Noelle Mateer", "url": "https://www.hoteldive.com/editors/nmateer/", "description": "Preferred Hotels & Resorts continues US expansion with California lodge ... The wellness travel trends that will impact hotel amenities in 2024. Jan. 25 ..."}, {"title": "TRAVELWEEK FEBRUARY 27 by Travelweek", "url": "https://issuu.com/travelweek/docs/travelweek_february_27_be4feca6dd7cb0", "description": "It's one of the fastest growing travel trends for 2025. According to ... Preferred Hotels & Resorts launches travel advisor portal. NEW ..."}, {"title": "PoB Hotels releases 2026 Whitepaper on luxury travel ...", "url": "https://www.linkedin.com/posts/the-hotel-magazine_pob-whitepaper-report-2026-activity-7387111501813428224-ZslR", "description": "PoB Hotels releases 2026 Whitepaper on luxury travel trends in Britain ... Learn more from Pia-Marie Ametsbichler (Preferred Hotels & Resorts) ..."}]</tool_result><|endofturn|>
|
| 69 |
+
<|assistant|>
|
| 70 |
+
<think>
|
| 71 |
+
Let's open the Forbes article again, but maybe we need to scroll to find the exact sentence. Let's open with a pattern that captures the relevant paragraph.
|
| 72 |
+
</think>
|
| 73 |
+
|
| 74 |
+
<tool_call>{"name": "open", "arguments": {"url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "pattern": "Longevity is also a trend highlighted in Preferred Hotels & Resorts'"}}</tool_call><|endofturn|>
|
| 75 |
+
<|tool|>
|
| 76 |
+
<tool_result>Summary:
|
| 77 |
+
Longevity wellness is highlighted as a key trend in Preferred Hotels & Resorts’ Travel Trends predictions for 2025. The Forbes article notes that longevity wellness combines advanced science and technology with traditional practices to support recovery, healthy aging, and mental well-being, including biohacking and high-tech treatments. Examples cited include Alohilani Resort Waikiki Beach’s Fit Waikiki wellness center, the Drip Hawaii IV lounge, and a broader slate of properties such as Lefay Resort & Spa Dolomiti, The Alpina Gstaad, ZEM Wellness Clinic Altea, and the RoseBar Longevity Clinic at Six Senses Ibiza (open since 2023). The piece also mentions new hi-tech wellness initiatives at SIRO hotels (Dubai) and a May 2025 opening for SIRO Boka Palace in Montenegro, all framed within a movement toward sleep technology, brain health, and personalized recovery services. Additionally, the article references Accor’s 2025 Wellness Trends report and a brain-fitness focus involving Viavi and brain-health services priced at £12,000 for a full evaluation and £1,000 for a pTau blood test.
|
| 78 |
+
|
| 79 |
+
Answer to the question:
|
| 80 |
+
Longevity is a trend highlighted in Preferred Hotels & Resorts’ Travel Trends predictions for 2025, described as longevity wellness that blends advanced science and technology with traditional wellness practices to support recovery, healthy aging, and mental well-being. The article demonstrates this trend through several concrete examples across luxury and wellness-focused properties. At Alohilani Resort Waikiki Beach in Honolulu, the wellness offering includes Fit Waikiki, a reimagined wellness center, and the Drip Hawaii medical spa providing Hawaii’s first IV lounge, illustrating institutionalized longevity-oriented services within a resort setting.
|
| 81 |
+
|
| 82 |
+
Other highlighted properties include Lefay Resort & Spa Dolomiti in Italy and The Alpina Gstaad in Switzerland, which are described as integrating longevity-focused offerings with luxury accommodations. ZEM Wellness Clinic Altea in Spain is presented as a medical-wacenter operation powered by advanced technology and cross-disciplinary specialists, with features such as a Metawell Centre from Gharieni Group for mind-body technologies and an exclusive Recovery Unit with cryotherapy, hyperbaric oxygen, and pressotherapy. The RoseBar Longevity Clinic at Six Senses Ibiza, opened in 2023, is cited as a pioneer in merging health technologies with holistic spa practices to boost mental and physical well-being.
|
| 83 |
+
|
| 84 |
+
The article also discusses high-tech and brain-health aspects of longevity, including SIRO One Za’abeel in Dubai and the upcoming SIRO Boka Palace in Montenegro (opening May 2025) with a Destination Fitness pillar and recovery-focused facilities. The brain-fitness component is further developed through Accor’s wellness initiatives and collaborations with Viavi and Dr. Sabine Donnai, offering brain-health services such as a £12,000 brain evaluation and a £1,000 pTau blood test, and emphasizing four trends: brain supplements, nootropic-infused drinks, high-tech brain devices, and personalized brainwave programs to optimize cognitive performance. Together, these examples illustrate how Preferred Hotels & Resorts’ travel trends for 2025 foreground longevity as a central theme in wellness offerings and hotel experiences.</tool_result><|endofturn|>
|
| 85 |
+
<|assistant|>
|
| 86 |
+
<think>
|
| 87 |
+
The summary provides a comprehensive answer. However, the user may want a concise answer: what do the predictions indicate about longevity wellness? The summary says: longevity wellness is highlighted as a key trend, combining advanced science and technology with traditional practices to support recovery, healthy aging, and mental well-being.
|
| 88 |
+
|
| 89 |
+
But perhaps the user wants the exact phrasing from the report. Let's search for the phrase "Preferred Hotels & Resorts’ Travel Trends predictions for 2025 indicate" within the article. Use open with pattern "indicate".
|
| 90 |
+
</think>
|
| 91 |
+
|
| 92 |
+
<tool_call>{"name": "open", "arguments": {"url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "pattern": "indicate"}}</tool_call><|endofturn|>
|
| 93 |
+
<|tool|>
|
| 94 |
+
<tool_result>Summary:
|
| 95 |
+
The Wellness Trends Report 2025: How Wellness Got Smarter, by Angelina Villa-Clarke and published February 4, 2025, explores the shift toward high-tech solutions for health, longevity therapies, and integrated wellness experiences. It highlights a wave of new wellness facilities and tech-enabled services, including ZEM Wellness Clinic Altea in Spain, which opened recently and combines medicine, nutrition, and a Recovery Unit with advanced technologies such as cryotherapy and hyperbaric oxygen. The piece notes industry voices—such as Franck De Weerdt of ZEM, Dr. Preema Vig, and Talana Bestall of RoseBar/Six Senses Ibiza—emphasizing the commodification of cutting-edge treatments like Metawell mind-body technologies and Profhilo regenerative skincare. It traces trends in longevity-focused offerings at luxury brands (RoseBar, Lefay Resort & Spa, The Alpina Gstaad, SIRO hotels) and references Preferrred Hotels & Resorts’ 2025 outlook on integrating science, tech, and authenticity. A notable feature is brain fitness as a rising priority, with Brain health services priced at about £12,000 for a comprehensive brain evaluation and £1,000 for a pTau blood test via Viavi, plus four key brain-boosting categories (supplements, nootropic drinks, devices, and personalized programs). The article also covers sleep technology, delivery of tailored recovery programs, and the expansion of wellness offerings across Accor’s and SIRO’s portfolios, including openings and plans for 2025.
|
| 96 |
+
|
| 97 |
+
Answer to the question:
|
| 98 |
+
The page indicates that Wellness Trends Report 2025: How Wellness Got Smarter presents a detailed look at how wellness is becoming more high-tech and science-based in 2025. It identifies several core developments:
|
| 99 |
+
|
| 100 |
+
- High-tech wellness and longevity focus: clinics and hotels are integrating advanced medical technology, data-driven recovery, and science-backed therapies to extend healthy lifespans. Examples include ZEM Wellness Clinic Altea in Spain, which opened recently, and RoseBar Longevity Clinic at Six Senses Ibiza, which has been advancing health-tech-integrated spa experiences since 2023. The MedTech and recovery tech highlighted include Metawell mind-body technologies and a Recovery Unit featuring cryotherapy, hyperbaric oxygen, and pressotherapy, with explanations from executives such as Franck De Weerdt (CEO of ZEM) and Talana Bestall (RoseBar).
|
| 101 |
+
|
| 102 |
+
- Brain fitness as a principal trend: Accor and Viavi are spotlighting brain health innovations. Accor cites brain-focused offerings through partners like Viavi, with a brain health service priced at £12,000 for a comprehensive brain evaluation and £1,000 for the pTau blood test. Dr. Sabine Donnai discusses a holistic brain health program and data-driven approach to cognitive performance, aiming to future-proof cognition.
|
| 103 |
+
|
| 104 |
+
- Longevity and sleep/wellness integration in hospitality: brands such as Lefay Resort & Spa, The Alpina Gstaad, and SIRO hotels are expanding longevity wellness, sleep intelligence, and recovery labs. SIRO One Za’abeel in Dubai emphasizes sleep technology, including AI sleep monitors and regulated mattresses, with a second SIRO property, SIRO Boka Palace in Montenegro, planned to open in May 2025, featuring dedicated fitness and recovery labs and personalized nutrition plans.
|
| 105 |
+
|
| 106 |
+
- Industry voices and brand perspectives: Emlyn Brown from Accor’s Wellness Trends report stresses that wellness in 2025 should be grounded in science and authenticity, balancing technology with informed choices and sustainable habits. Emlyn Brown also notes the broader shift toward transparent and holistic well-being, beyond hype. The article also references Dr. Preema Vig’s skincare innovations and Profhilo’s regenerative approach, illustrating how cosmetic and medical wellness converge.</tool_result><|endofturn|>
|
| 107 |
+
<|assistant|>
|
| 108 |
+
<think>
|
| 109 |
+
The summary doesn't directly answer the question. Perhaps we need to search for the exact phrase "Preferred Hotels & Resorts’ Travel Trends predictions for 2025 indicate about longevity wellness". Let's search more broadly.
|
| 110 |
+
</think>
|
| 111 |
+
|
| 112 |
+
<tool_call>{"name": "search", "arguments": {"query": "\"Preferred Hotels & Resorts\" \"longevity wellness\" 2025"}}</tool_call><|endofturn|>
|
| 113 |
+
<|tool|>
|
| 114 |
+
<tool_result>[{"title": "Preferred Hotels & Resorts Unveils 2025 Travel Trends", "url": "https://www.luxurytravelmagazine.com/news-articles/preferred-hotels-resorts-unveils-2025-travel-trends", "description": "Other Preferred Hotels & Resorts properties also embracing longevity wellness with services such as cryotherapy, oxygen therapy, and ..."}, {"title": "Wellness Trends Report 2025: How Wellness Got Smarter", "url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "description": "Other Preferred Hotels & Resorts properties also embracing longevity wellness, with services such as cryotherapy, oxygen therapy, and advanced ..."}, {"title": "WELLNESS GUIDE: Vilalara Longevity Thalassa & Medical ...", "url": "https://luxnomade.com/resorts/wellness-guide-vilalara-longevity-thalassa-medical-spa/", "description": "Located right by the sea, the long-established Vilalara Thalassa Resort, in the Algarve – Portugal joined forces with Longevity Wellness Worldwide to create ..."}, {"title": "Quiet luxury: When wealth goes silent", "url": "https://en.10minhotel.com/2025/08/19/quiet-luxury-when-wealth-goes-silent/", "description": "... 2025 The era ... longevity, wellness, and life-enhancing experiences. ... Sesimbra Oceanfront Hotel Joins the Preferred Hotels & Resorts Lifestyle Collection."}, {"title": "Page 44", "url": "https://hoteldesigns.net/page/44/?attachment_id", "description": "... Longevity Wellness Worldwide's core principles of restoration and regeneration… ... The Londoner, a member of the prestigious Preferred Hotels & Resorts ..."}, {"title": "UAE'S NEXT MASTERMIND AWARDS 2025 🌟 Business & ...", "url": "https://www.instagram.com/p/DI2nRdISPwt/", "description": "Al Habtoor Palace Dubai, Preferred Hotels & Resorts. Photo by MASTERMIND_AWARDS in Al Habtoor Palace Dubai, Preferred Hotels & Resorts with @dubaitv. dubaitv."}, {"title": "Town & Country USA - Summer 2025 | PDF | Flight Attendant", "url": "https://www.scribd.com/document/872321029/Town-Country-USA-Summer-2025", "description": "© 2025 Preferred Hotels & Resorts. HOTEL CASA DEL MAR, Santa Monica ... encers are hosting glucose challenges (com- longevity wellness company Danford Works."}, {"title": "LURA Lifestyle Management (@luralifestyle)", "url": "https://www.instagram.com/luralifestyle/?hl=en", "description": "... Preferred Hotels & Resorts and Bucketlist Xperiences. This rare opportunity invites travellers to explore seven legendary destinations - Singapore, India ..."}, {"title": "UAE'S NEXT MASTERMIND AWARDS 2025 🌟 Business & ...", "url": "https://www.instagram.com/p/DLIbJTNyu34/", "description": "... 2025 Business & Corporate Leaders | Coaching Leaders | Authors 21st ... Photo by MASTERMIND_AWARDS in Al Habtoor Palace Dubai, Preferred Hotels & Resorts ..."}, {"title": "Real Estate Industry Media (@realestateindustrymedia)", "url": "https://www.instagram.com/realestateindustrymedia/?hl=en", "description": "Photo by Zulfiqar Ali in Al Habtoor Palace Dubai, Preferred Hotels & Resorts with @ ... longevity, wellness, and thoughtful placemaking ..."}]</tool_result><|endofturn|>
|
| 115 |
+
<|assistant|>
|
| 116 |
+
<think>
|
| 117 |
+
Let's open the Forbes article again, but maybe the summary we have is sufficient. The user asked: "According to the 'Wellness Trends Report 2025: How Wellness Got Smarter', what do the Preferred Hotels & Resorts’ Travel Trends predictions for 2025 indicate about longevity wellness?"
|
| 118 |
+
|
| 119 |
+
Based on the summary, the answer is that longevity wellness is a leading trend highlighted in Preferred Hotels & Resorts' Travel Trends predictions for 2025, combining advanced science and technology with traditional practices to support recovery, healthy aging, and mental well-being. It includes biohacking, high-tech recovery, and brain-health initiatives across luxury properties.
|
| 120 |
+
|
| 121 |
+
We can provide a concise answer with supporting details. Let's craft the answer.
|
| 122 |
+
|
| 123 |
+
But maybe we should verify by opening the Forbes article directly and looking for the exact paragraph. Let's open with a pattern that captures the paragraph containing "Preferred Hotels & Resorts’ Travel Trends predictions for 2025".
|
| 124 |
+
</think>
|
| 125 |
+
|
| 126 |
+
<tool_call>{"name": "open", "arguments": {"url": "https://www.forbes.com/sites/angelinavillaclarke/2025/02/04/wellness-trends-report-2025-how-wellness-got-smarter/", "pattern": "Preferred Hotels & Resorts\u2019 Travel Trends predictions for 2025"}}</tool_call><|endofturn|>
|
| 127 |
+
<|tool|>
|
| 128 |
+
<tool_result>Summary:
|
| 129 |
+
Longevity wellness is at the forefront of 2025 wellness trends, combining advanced science and technology with traditional practices to support recovery, healthy aging, and mental well-being. Preferred Hotels & Resorts’ Travel Trends predictions for 2025 highlight longevity as a key theme, including biohacking recovery and tech-enabled wellness across its properties. Examples cited include Alohilani Resort Waikiki Beach in Honolulu, whose Fit Waikiki wellness center is reimagined with offerings like rooftop aerial yoga and a full-service spa, plus Drip Hawaii’s IV lounge. Other Preferred Hotels & Resorts properties embracing longevity wellness include Lefay Resort & SPA Dolomiti in Italy and The Alpina Gstaad in Switzerland, which offer integrative services such as cryotherapy, oxygen therapy, and advanced skin treatments. These high-tech, science-backed offerings paired with luxurious accommodations are described as becoming the go-to for health, longevity, and overall quality of life.
|
| 130 |
+
|
| 131 |
+
Answer to the question:
|
| 132 |
+
Preferred Hotels & Resorts’ Travel Trends predictions for 2025 identify longevity wellness as a leading trend. The report describes longevity wellness as the integration of advanced science and technology with traditional wellness practices to support recovery, healthy aging, and mental well-being. It emphasizes a shift toward biohacking recovery and high-tech wellness experiences within its hotel portfolio, illustrating how luxury properties are incorporating cutting-edge health services.
|
| 133 |
+
|
| 134 |
+
Specific examples from Preferred Hotels & Resorts properties include:
|
| 135 |
+
- Alohilani Resort Waikiki Beach (Honolulu): The resort’s wellness center, Fit Waikiki, has been reimagined to offer features such as rooftop aerial yoga and a full-service spa. The Drip Hawaii medical spa is noted as providing Waikiki’s first IV lounge, illustrating the move toward advanced, tech-enabled health services.
|
| 136 |
+
|
| 137 |
+
- Lefay Resort & SPA Dolomiti (Italy) and The Alpina Gstaad (Switzerland): These Preferred Hotels & Resorts properties are highlighted as offering integrative longevity wellness, including services such as cryotherapy, oxygen therapy, and advanced skin treatments.
|
| 138 |
+
|
| 139 |
+
The article frames these offerings as part of a broader shift where enhanced wellness experiences—driven by science and technology—are combined with luxurious accommodations to become the default for guests seeking health, longevity, and improved quality of life.</tool_result><|endofturn|>
|
| 140 |
+
<|assistant|>
|
| 141 |
+
<think>
|
| 142 |
+
Now we have a clear answer. The summary indicates that Preferred Hotels & Resorts’ Travel Trends predictions for 2025 identify longevity wellness as a leading trend, combining advanced science and technology with traditional wellness practices to support recovery, healthy aging, and mental well-being. It includes biohacking recovery and high-tech wellness experiences across properties.
|
| 143 |
+
|
| 144 |
+
We can answer accordingly.
|
| 145 |
+
|
| 146 |
+
But perhaps the user wants a more specific indication: maybe the report says that longevity wellness is "at the forefront" or "a key theme". The summary says "Longevity wellness is at the forefront of 2025 wellness trends" and "Preferred Hotels & Resorts’ Travel Trends predictions for 2025 highlight longevity as a key theme".
|
| 147 |
+
|
| 148 |
+
Thus, the answer: The predictions indicate that longevity wellness is a leading trend, combining advanced science and technology with traditional practices to support recovery, healthy aging, and mental well-being.
|
| 149 |
+
|
| 150 |
+
We can include examples.
|
| 151 |
+
|
| 152 |
+
Let's formulate the final answer.
|
| 153 |
+
|
| 154 |
+
We'll cite the source: Forbes article "Wellness Trends Report 2025: How Wellness Got Smarter". We'll reference the summary.
|
| 155 |
+
|
| 156 |
+
Now, answer.
|
| 157 |
+
</think>
|
| 158 |
+
|
| 159 |
+
Based on the **Forbes Wellness Trends Report 2025: How Wellness Got Smarter**, the **Preferred Hotels & Resorts’ Travel Trends predictions for 2025** identify **longevity wellness as a leading trend**.
|
| 160 |
+
|
| 161 |
+
Specifically, the report states that **“longevity wellness is at the forefront of 2025 wellness trends”** and that **Preferred Hotels & Resorts’ Travel Trends predictions highlight longevity as a key theme**.
|
| 162 |
+
|
| 163 |
+
**What this indicates about longevity wellness:**
|
| 164 |
+
- **Integration of advanced science and technology with traditional wellness practices** to support recovery, healthy aging, and mental well‑being.
|
| 165 |
+
- A shift toward **biohacking recovery** and **high‑tech wellness experiences** across luxury properties.
|
| 166 |
+
- The trend is exemplified by properties such as:
|
| 167 |
+
- **Alohilani Resort Waikiki Beach** (Honolulu) – reimagined Fit Waikiki wellness center and Drip Hawaii IV lounge.
|
| 168 |
+
- **Lefay Resort & SPA Dolomiti** (Italy) – offering cryotherapy, oxygen therapy, and advanced skin treatments.
|
| 169 |
+
- **The Alpina Gstaad** (Switzerland) – providing integrative longevity‑focused services.
|
| 170 |
+
|
| 171 |
+
In essence, the predictions signal that **longevity wellness is moving beyond conventional spa offerings toward science‑backed, tech‑enabled experiences that combine cutting‑edge therapies with luxury accommodations** to help guests extend healthy lifespans and enhance overall quality of life.
|
| 172 |
+
|
| 173 |
+
*Source: Forbes Wellness Trends Report 2025: How Wellness Got Smarter (published February 4, 2025).*<|endofturn|>
|
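The example above shows the raw prompt layout the model expects: a <|tool_declare|> block with one <tool> JSON schema per tool, then <|system|>, <|user|>, and <|assistant|> turns with <tool_call>/<tool_result> exchanges. The sketch below is a rough, non-authoritative illustration of how the same prompt would typically be produced through the Transformers chat-template API; it assumes a recent transformers release whose apply_chat_template accepts a tools argument and that chat_template.jinja renders it. The checkpoint path is a placeholder, and the tool schemas are lightly adapted from the example file.

```python
# Sketch only: rebuild the prompt layout of examples/example_output_search.txt
# via the chat template, rather than concatenating special tokens by hand.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this-checkpoint")  # placeholder path

tools = [
    {"type": "function", "function": {
        "name": "search",
        "description": "Discovery search focusing on diverse online sources.",
        "parameters": {"type": "object",
                       "properties": {"query": {"type": "string",
                                                "description": "Search query to send to Search API."}},
                       "required": ["query"]}}},
    {"type": "function", "function": {
        "name": "open",
        "description": "Open a specific website by providing its URL and retrieve a concise summary of the website's content.",
        "parameters": {"type": "object",
                       "properties": {"url": {"type": "string",
                                              "description": "The target website URL or domain"},
                                      "pattern": {"type": "string",
                                                  "description": "The target pattern of the webpage content to be summarized."}},
                       "required": ["url", "pattern"]}}},
]

messages = [
    {"role": "system", "content": "You're a helpful assistant."},
    {"role": "user", "content": "According to the 'Wellness Trends Report 2025: How Wellness Got Smarter', "
                                "what do the Preferred Hotels & Resorts’ Travel Trends predictions for 2025 "
                                "indicate about longevity wellness?"},
]

# chat_template.jinja is expected to emit the <|tool_declare|> block, the system and
# user turns, and the assistant header, matching the transcript above.
prompt = tokenizer.apply_chat_template(messages, tools=tools,
                                       add_generation_prompt=True, tokenize=False)
print(prompt)
```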
generation_config.json
ADDED
|
@@ -0,0 +1,10 @@
| 1 | + {
| 2 | +   "_from_model_config": true,
| 3 | +   "bos_token_id": 1,
| 4 | +   "eos_token_id": 2,
| 5 | +   "pad_token_id": 0,
| 6 | +   "presence_penalty": 0.0,
| 7 | +   "temperature": 1.0,
| 8 | +   "top_p": 0.95,
| 9 | +   "transformers_version": "5.0.0.dev0"
| 10 | + }
|
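For reference, these defaults map onto a Transformers GenerationConfig. The sketch below is our own and omits presence_penalty, which is mainly consumed by OpenAI-compatible serving stacks rather than by transformers' generate.

```python
# Sketch only: the sampling defaults from generation_config.json above.
from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    bos_token_id=1,
    eos_token_id=2,
    pad_token_id=0,
    temperature=1.0,  # neutral temperature; sampling is shaped mainly by top_p
    top_p=0.95,       # nucleus sampling over the top 95% of probability mass
)
# Typical use: model.generate(**inputs, generation_config=gen_cfg, do_sample=True)
```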
model-00001-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3819f32eb513897839874ba0651e62bea40b07d4013f3d6ca08b99c7a104967c
|
| 3 |
+
size 4984444064
|
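Each model-*.safetensors entry in this commit is a Git LFS pointer rather than the weights themselves: three key/value lines giving the spec version, the SHA-256 of the real file, and its size in bytes. The sketch below is our own (paths are placeholders) and shows how such a pointer can be parsed and used to verify a downloaded shard.

```python
# Sketch only: parse a Git LFS pointer file and verify a downloaded shard against it.
import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    """Turn 'version ...', 'oid sha256:...', 'size ...' lines into a dict."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].split(":", 1)[1],
        "size": int(fields["size"]),
    }

def verify_shard(pointer_path: Path, shard_path: Path) -> bool:
    """Compare a shard's byte size and SHA-256 digest with its LFS pointer."""
    ptr = parse_lfs_pointer(pointer_path.read_text())
    if shard_path.stat().st_size != ptr["size"]:
        return False
    digest = hashlib.sha256()
    with shard_path.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == ptr["sha256"]
```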
model-00002-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:aa7e1826ad8a779c105661773a59ac2e0a5c0f9e6e80360e9fb2fe1728e600ec
|
| 3 |
+
size 4982857688
|
model-00003-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:ef77a28cde9d26c90eeee23e1237470184cb3a0098b7320953fd30e0b3666f83
|
| 3 |
+
size 4984456216
|
model-00004-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:67bab236f2b86532bf538441d111b70a0b323dc900a66b5bb017acba1968caa6
|
| 3 |
+
size 4982857688
|
model-00005-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:24748f41c0234270f7d8b79015d2a00f23b8ab0d02c47ce7681cffb2ba084596
|
| 3 |
+
size 4984456216
|
model-00006-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6d908b84617a632c65bd7bcb02805adebf4bc6fccde833e0b9cf95843652b686
|
| 3 |
+
size 4982857688
|
model-00007-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f85c6828493d4bc8cd700413c182109c5d2eb4e878d4e501bb19ba441249a1c8
|
| 3 |
+
size 4984456216
|
model-00008-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:128e786c15a890be9cfbdbdd51b24521f3417b7d924f9390f38668bf518407d0
|
| 3 |
+
size 4982857688
|
model-00009-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0f2d16e89d54655e4cd811ccbf3263a9ea7c9832acfa42388f436dc9335a6de1
|
| 3 |
+
size 4984456216
|
model-00010-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b552aba97f1468e4d1de833fcddb4f46edc1e62ac986e3fb898c214992978a0e
|
| 3 |
+
size 4982857688
|
model-00011-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:c25988256341f8a75308f1cbe5097b2306b93d1184c0980600563084e1325823
|
| 3 |
+
size 4984456216
|
model-00012-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5eb38ac4f006bce969d2caa48453b340be280ebdf8569df5dece90af332d938b
|
| 3 |
+
size 4982857688
|
model-00013-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b3aab153a8f46f009bc96f4314912418db334483e558cc72f00eeb657e3ff986
|
| 3 |
+
size 4984456216
|
model-00014-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:73618c5ec45426fd80a798d8378da78e7803b467a203ee400c573443806fb133
|
| 3 |
+
size 4982857688
|
model-00015-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f8b9bbccf2b52bcb6dd3a847f37873d4b83dce21584c36abc12bb7f0462b8830
|
| 3 |
+
size 4984456216
|
model-00016-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0d52ded5e93afc6d70c1a7808cbcf824359402d7530be5f146b6b6dbac4f3450
|
| 3 |
+
size 4982857688
|
model-00017-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7830da7bf015f6809c2fb12723eaf88a78cb1dc9c9a26c72117cebaa2aea7f38
|
| 3 |
+
size 4984456216
|
model-00018-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e246c762ced4c41ca27acd825e4719b06add1da711491198c0a4f3ea5a9edd2b
|
| 3 |
+
size 4982857688
|
model-00019-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e024fc3d7126016e20fb5c50e522c7e1253ce6a44920979659a4b17e2b48419f
|
| 3 |
+
size 4984456312
|
model-00020-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f93bc63e72cb0006e326b919a02b8e7267a78c80a058217ed67468b8f2d0e926
|
| 3 |
+
size 4982857888
|
model-00021-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e775c2bce665bb822cbd2ce3f8c57bceac05e2ac89cf35d26f537a84bbfbf71e
|
| 3 |
+
size 4984456416
|
model-00022-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:174b728b2a4e217c75913a6238ec1dbe4eb104aceaa6ab1a8cadb8527648c1ae
|
| 3 |
+
size 4982857888
|
model-00023-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5680604f9bf821d8dd75a8453f8d51d4f82234393a67fa378b9768f67026fd55
|
| 3 |
+
size 4984456416
|
model-00024-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3cab3b6e717be8666948c079bde25e7ecb88395a053c17e5554846acb15cff7f
|
| 3 |
+
size 4982857888
|
model-00025-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d4560993c6dcf1f2ff91874e5b98928d1bd88f933bd43cd1ac48f7df0483a0d3
|
| 3 |
+
size 4984456416
|
model-00026-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:99ad65b2a6e37d90d37f47dcd639bfb53e76c6d3aed24e81fde98e66d3139a1e
|
| 3 |
+
size 4982857888
|
model-00027-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:79179b9eb6ffa8d3652f1bccc8d90f5bd2e2a648fb5c9a2c3b0740e7ac382a7d
|
| 3 |
+
size 4984456416
|
model-00028-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:05773ce51728f6a453232441df82caca50aecade0c460b8750c9df5edee38aad
|
| 3 |
+
size 4982857888
|
model-00029-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d4e9f803a7b71e4f8b58529ef811ff65d30fcabd9d4c0e07eb619b7db79f4fdf
|
| 3 |
+
size 4984456416
|
model-00030-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:c80011beb1affac2b2189699b834511bd509b5017c59fe7d56c752bef21c472d
|
| 3 |
+
size 4982857888
|
model-00031-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:36cc9e6ed2583de7ee8609ece391072e78b53ed60eaef20166d0e99c237ebc52
|
| 3 |
+
size 4984456416
|
model-00032-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:c507cf26934788aaaf1f8b628a0db3512f87018af17736ff3de97320460925f6
|
| 3 |
+
size 4982857888
|
model-00033-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:58208aa801e6493d8ba1066c4ad05386b972a1667d6a4c67e945be37c72c40ec
|
| 3 |
+
size 4984456416
|
model-00034-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6aab0ff9e69c7ef14df6f7d9190ceef9ab1014f775fab6b7056ddb5668618842
|
| 3 |
+
size 4982857888
|
model-00035-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3b6e9e9c5fb40acf3b3a561724ca564828fb3bd3711ea00a1689ae09729abf3c
|
| 3 |
+
size 4984456416
|
model-00036-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:00eee01f73bac78c2e76edc20330f6837b56b0e71f4789e8ef6653174773d2c0
|
| 3 |
+
size 4982857888
|
model-00037-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:c931b1fd8119a4446165a5f44e4f322c34149f2fe2dcf4fb75f50fb5a78e386e
|
| 3 |
+
size 4984456416
|
model-00038-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e9b521f1939e0e58ac6b7a8a66d08c3eae88ad7130eb12c34e1f0e7c322039d8
|
| 3 |
+
size 4982857888
|
model-00039-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:69809270c6e650e55eb167c646755650ab8da16e9e16e40f529ef2ae2832ff21
|
| 3 |
+
size 4984456416
|
model-00040-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3e2778dda4881003bf4557cd3314b82ca654e9b4a021a017ace2d601615711bf
|
| 3 |
+
size 4982857888
|
model-00041-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f98c4c1cf853c5d5ca993dd1d7d5088a8ce27075b10599128a420a89b7dc7a83
|
| 3 |
+
size 4984456416
|
model-00042-of-00096.safetensors
ADDED
|
@@ -0,0 +1,3 @@
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:9cd4bafe344c078f519c6254e8a607372283ddccc14f2094fe06708f7792616d
|
| 3 |
+
size 4982857888
|