settings (#1)
- settings (61593cf594616a3b3a49e493a8efd0cffbdd74c1)
- Update tokenizer_config.json (d72759eb9ecd8b6b89b79b8b6bec7506f8955152)
- Update chat_template.jinja (2a97cd4ffbb9146e4a918d050522685b07eb5da2)
- Update model.safetensors.index.json (cc0588e06e033297cfd8de104525181ef7e24c4c)
- Upload olmo-instruct.png (20d6096aa631a89fb041faae70983cceb709b248)
- .gitattributes +1 -0
- chat_template.jinja +1 -1
- generation_config.json +8 -6
- model.safetensors.index.json +2 -2
- olmo-instruct.png +3 -0
- tokenizer_config.json +3 -2
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+olmo-instruct.png filter=lfs diff=lfs merge=lfs -text
chat_template.jinja
CHANGED
@@ -12,4 +12,4 @@ You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December
 ' + message['content'] + '<|im_end|>
 ' -}}{%- elif message['role'] == 'tool' -%}{{- '<|im_start|>environment
 ' + message['content'] + '<|im_end|>
-' -}}{%- endif -%}{%- if loop.last and add_generation_prompt -%}{{- '<|im_start|>assistant\n' -}}{%- endif -%}{%- endfor -%}
+' -}}{%- endif -%}{%- if loop.last and add_generation_prompt -%}{{- '<|im_start|>assistant\n' -}}{%- endif -%}{%- endfor -%}
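The hunk only touches the tail of the template: 'tool' messages are rendered as <|im_start|>environment turns, and when add_generation_prompt is set the trailing '<|im_start|>assistant\n' is appended. A minimal sketch of exercising that path through transformers, assuming a local clone of this repository (the calculator call is illustrative, not part of the repo):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # assumes a local clone of this repository

messages = [
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "Checking.",
     "function_calls": 'calculator(expression="2 + 2")'},  # hypothetical tool call
    {"role": "tool", "content": "4"},  # rendered as an <|im_start|>environment turn
]

# add_generation_prompt=True appends the '<|im_start|>assistant\n' emitted by the
# loop.last branch shown in the hunk above.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)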
generation_config.json
CHANGED
@@ -1,8 +1,10 @@
 {
   "_from_model_config": true,
-"
-
-
-
-"
-
+  "do_sample": true,
+  "eos_token_id": 100257,
+  "pad_token_id": 100277,
+  "transformers_version": "4.57.1",
+  "temperature": 0.6,
+  "top_p": 0.95,
+  "max_new_tokens": 32768
+}
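The rewritten config turns sampling on by default (temperature 0.6, top-p 0.95, up to 32768 new tokens) and pins the eos/pad token ids; generate() falls back to these values whenever a call does not override them. A minimal sketch, again assuming a local clone of this repository:

from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained(".")  # reads generation_config.json from the clone

print(gen_config.do_sample, gen_config.temperature, gen_config.top_p)  # True 0.6 0.95
print(gen_config.eos_token_id, gen_config.pad_token_id)                # 100257 100277
print(gen_config.max_new_tokens)                                       # 32768

# Per-call arguments still win over these defaults, e.g. greedy decoding:
# outputs = model.generate(**inputs, do_sample=False)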
model.safetensors.index.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "metadata": {
-    "total_parameters":
-    "total_size":
+    "total_parameters": 32233522176,
+    "total_size": 128934088704
   },
   "weight_map": {
     "lm_head.weight": "model-00014-of-00014.safetensors",
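The metadata block now carries the checkpoint totals: 32,233,522,176 parameters and 128,934,088,704 bytes (exactly four bytes per parameter), while weight_map routes every tensor name to one of the 14 shard files. A minimal sketch of reading the index from a local clone:

import json
from collections import defaultdict

with open("model.safetensors.index.json") as f:  # from a local clone of this repository
    index = json.load(f)

print(index["metadata"]["total_parameters"])  # 32233522176
print(index["metadata"]["total_size"])        # 128934088704 (bytes)

# Group tensor names by the shard file that stores them.
shards = defaultdict(list)
for tensor_name, shard_file in index["weight_map"].items():
    shards[shard_file].append(tensor_name)

for shard_file in sorted(shards):
    print(shard_file, len(shards[shard_file]), "tensors")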
olmo-instruct.png
ADDED
Git LFS Details
tokenizer_config.json
CHANGED
@@ -185,5 +185,6 @@
   "model_max_length": 65536,
   "pad_token": "<|pad|>",
   "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<|endoftext|>"
-}
+  "unk_token": "<|endoftext|>",
+  "chat_template": "{%- set has_system = messages|selectattr('role', 'equalto', 'system')|list|length > 0 -%}{%- if not has_system -%}{{- '<|im_start|>system\nYou are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai. ' -}}{%- if tools is none or (tools | length) == 0 -%}{{- 'You do not currently have access to any functions. <functions></functions><|im_end|>\n' -}}{%- else -%}{{- 'You are provided with function signatures within <functions></functions> XML tags. You may call one or more functions to assist with the user query. Output any function calls within <function_calls></function_calls> XML tags. Do not make assumptions about what values to plug into functions.' -}}{{- '<functions>' -}}{{- tools | tojson -}}{{- '</functions><|im_end|>\n' -}}{%- endif -%}{%- endif -%}{%- for message in messages -%}{%- if message['role'] == 'system' -%}{{- '<|im_start|>system\n' + message['content'] -}}{%- if tools is not none -%}{{- '<functions>' -}}{{- tools | tojson -}}{{- '</functions>' -}}{%- elif message.get('functions', none) is not none -%}{{- ' <functions>' + message['functions'] + '</functions>' -}}{%- endif -%}{{- '<|im_end|>\n' -}}{%- elif message['role'] == 'user' -%}{{- '<|im_start|>user\n' + message['content'] + '<|im_end|>\n' -}}{%- elif message['role'] == 'assistant' -%}{{- '<|im_start|>assistant\n' -}}{%- if message.get('content', none) is not none -%}{{- message['content'] -}}{%- endif -%}{%- if message.get('function_calls', none) is not none -%}{{- '<function_calls>' + message['function_calls'] + '</function_calls>' -}}{% elif message.get('tool_calls', none) is not none %}{{- '<function_calls>' -}}{%- for tool_call in message['tool_calls'] %}{%- if tool_call is mapping and tool_call.get('function', none) is not none %}{%- set args = tool_call['function']['arguments'] -%}{%- set ns = namespace(arguments_list=[]) -%}{%- for key, value in args.items() -%}{%- set ns.arguments_list = ns.arguments_list + [key ~ '=' ~ (value | tojson)] -%}{%- endfor -%}{%- set arguments = ns.arguments_list | join(', ') -%}{{- tool_call['function']['name'] + '(' + arguments + ')' -}}{%- if not loop.last -%}{{ '\n' }}{%- endif -%}{% else %}{{- tool_call -}}{%- endif %}{%- endfor %}{{- '</function_calls>' -}}{%- endif -%}{%- if not loop.last -%}{{- '<|im_end|>' + '\n' -}}{%- else -%}{{- eos_token -}}{%- endif -%}{%- elif message['role'] == 'environment' -%}{{- '<|im_start|>environment\n' + message['content'] + '<|im_end|>\n' -}}{%- elif message['role'] == 'tool' -%}{{- '<|im_start|>environment\n' + message['content'] + '<|im_end|>\n' -}}{%- endif -%}{%- if loop.last and add_generation_prompt -%}{{- '<|im_start|>assistant\\n' -}}{%- endif -%}{%- endfor -%}"
+}
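The template copied into tokenizer_config.json is the tool-calling variant of the one above: a tools list is serialized into a <functions>...</functions> block in the system turn, assistant tool_calls are rendered as name(arg=value) inside <function_calls>...</function_calls>, and tool output comes back as environment turns. A minimal sketch of the system-prompt injection, assuming a local clone of this repository; the get_weather definition is illustrative only:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # assumes a local clone of this repository

tools = [  # illustrative function signature, not shipped with the model
    {
        "name": "get_weather",
        "description": "Look up the current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    }
]

messages = [{"role": "user", "content": "What's the weather in Seattle?"}]

# The template dumps `tools | tojson` into a <functions>...</functions> block
# inside the <|im_start|>system turn.
prompt = tokenizer.apply_chat_template(
    messages, tools=tools, tokenize=False, add_generation_prompt=True
)
print(prompt)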