File tree: 3 files changed, +9 −1 lines changed
@@ -205,6 +205,8 @@ def __init__(
205205 model_path : str ,
206206 # NOTE: These parameters are likely to change in the future.
207207 n_ctx : int = 512 ,
208+ rope_freq_base : float = 10000.0 ,
209+ rope_freq_scale : float = 1.0 ,
208210 n_parts : int = - 1 ,
209211 n_gpu_layers : int = 0 ,
210212 seed : int = 1337 ,
@@ -227,6 +229,8 @@ def __init__(
227229 Args:
228230 model_path: Path to the model.
229231 n_ctx: Maximum context size.
232+ rope_freq_base: RoPE base frequency.
233+ rope_freq_scale: RoPE frequency scale.
230234 n_parts: Number of parts to split the model into. If -1, the number of parts is automatically determined.
231235 seed: Random seed. -1 for random.
232236 f16_kv: Use half-precision for key/value cache.
@@ -253,6 +257,8 @@ def __init__(
253257
254258 self .params = llama_cpp .llama_context_default_params ()
255259 self .params .n_ctx = n_ctx
260+ self .params .rope_freq_base = rope_freq_base
261+ self .params .rope_freq_scale = rope_freq_scale
256262 self .params .n_gpu_layers = n_gpu_layers
257263 self .params .seed = seed
258264 self .params .f16_kv = f16_kv
Original file line number Diff line number Diff line change @@ -184,6 +184,8 @@ class llama_context_params(Structure):
184184 _fields_ = [
185185 ("seed" , c_uint32 ),
186186 ("n_ctx" , c_int32 ),
187+ ("rope_freq_base" , c_float ),
188+ ("rope_freq_scale" , c_float ),
187189 ("n_batch" , c_int32 ),
188190 ("n_gpu_layers" , c_int32 ),
189191 ("main_gpu" , c_int32 ),
You can’t perform that action at this time.
0 commit comments