Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes.
- .gitattributes +1 -0
- arguments.yaml +55 -0
- environ.txt +225 -0
- script.sh +49 -0
- slice_12208/added_tokens.json +4 -0
- slice_12208/chat_template.json +3 -0
- slice_12208/config.json +85 -0
- slice_12208/preprocessor_config.json +52 -0
- slice_12208/processor_config.json +7 -0
- slice_12208/pytorch_model.bin +3 -0
- slice_12208/special_tokens_map.json +31 -0
- slice_12208/tokenizer.json +0 -0
- slice_12208/tokenizer.model +3 -0
- slice_12208/tokenizer_config.json +69 -0
- slice_24416/added_tokens.json +4 -0
- slice_24416/chat_template.json +3 -0
- slice_24416/config.json +85 -0
- slice_24416/preprocessor_config.json +52 -0
- slice_24416/processor_config.json +7 -0
- slice_24416/pytorch_model.bin +3 -0
- slice_24416/special_tokens_map.json +31 -0
- slice_24416/tokenizer.json +0 -0
- slice_24416/tokenizer.model +3 -0
- slice_24416/tokenizer_config.json +69 -0
- slice_36624/added_tokens.json +4 -0
- slice_36624/chat_template.json +3 -0
- slice_36624/config.json +85 -0
- slice_36624/preprocessor_config.json +52 -0
- slice_36624/processor_config.json +7 -0
- slice_36624/pytorch_model.bin +3 -0
- slice_36624/special_tokens_map.json +31 -0
- slice_36624/tokenizer.json +0 -0
- slice_36624/tokenizer.model +3 -0
- slice_36624/tokenizer_config.json +69 -0
- slice_48832/added_tokens.json +4 -0
- slice_48832/chat_template.json +3 -0
- slice_48832/config.json +85 -0
- slice_48832/preprocessor_config.json +52 -0
- slice_48832/processor_config.json +7 -0
- slice_48832/pytorch_model.bin +3 -0
- slice_48832/special_tokens_map.json +31 -0
- slice_48832/tokenizer.json +0 -0
- slice_48832/tokenizer.model +3 -0
- slice_48832/tokenizer_config.json +69 -0
- slice_61040/added_tokens.json +4 -0
- slice_61040/chat_template.json +3 -0
- slice_61040/config.json +85 -0
- slice_61040/preprocessor_config.json +52 -0
- slice_61040/processor_config.json +7 -0
- slice_61040/pytorch_model.bin +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+wandb/run-20250330_124344-uzpzatp0/run-uzpzatp0.wandb filter=lfs diff=lfs merge=lfs -text
arguments.yaml
ADDED
@@ -0,0 +1,55 @@
+data_cfgs:
+  eval_data_files: {}
+  eval_datasets: {}
+  eval_name: {}
+  eval_optional_args: []
+  eval_size: {}
+  eval_split: {}
+  eval_subset: {}
+  eval_template: {}
+  load_multi_datasets: false
+  train_data_files: {}
+  train_datasets: /aifs4su/yaodong/hantao/datasets/MMInstruct-GPT4V_mistral-7b_cosi_cut/merged/top1-100_valid
+  train_name: text-image-to-text
+  train_optional_args: []
+  train_size: {}
+  train_split: train
+  train_template: MM_TI2T_LLAVA
+logger_cfgs:
+  cache_dir: {}
+  log_project: align-anything
+  log_run_name: sft
+  log_type: wandb
+  output_dir: ../outputs/LLAVA_7B_cosi/top1-100_valid
+  save_total_limit: 6
+model_cfgs:
+  model_max_length: 4096
+  model_name_or_path: /aifs4su/yaodong/hantao/models/llava-v1.6-mistral-7b-hf
+  trust_remote_code: true
+special_tokens: {}
+train_cfgs:
+  adam_betas:
+  - 0.9
+  - 0.95
+  adam_epsilon: 1.0e-08
+  bf16: true
+  ds_cfgs: ds_z3_config.json
+  epochs: 3
+  eval_interval: 10
+  eval_strategy: epoch
+  fp16: false
+  freeze_language_model: false
+  freeze_mm_proj: false
+  freeze_vision_tower: true
+  gradient_accumulation_steps: 16
+  gradient_checkpointing: true
+  learning_rate: 2.0e-05
+  load_checkpoint: false
+  lr_scheduler_type: cosine
+  lr_warmup_ratio: 0.03
+  max_grad_norm: 1.0
+  per_device_eval_batch_size: 1
+  per_device_train_batch_size: 1
+  save_checkpoint: false
+  seed: 42
+  weight_decay: 0.0
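Not part of the upload, but as a minimal sketch of reading this configuration in Python (assuming PyYAML is available; only the keys shown above are relied on):

import yaml

with open("arguments.yaml") as f:
    cfg = yaml.safe_load(f)

# Training hyperparameters and the base model used for this SFT run.
train_cfgs = cfg["train_cfgs"]
print(train_cfgs["learning_rate"], train_cfgs["epochs"])
print(cfg["model_cfgs"]["model_name_or_path"])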
environ.txt
ADDED
@@ -0,0 +1,225 @@
+ADDR2LINE=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-addr2line
+AR=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-ar
+AS=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-as
+BASH_FUNC__module_raw%%=() { unset _mlshdbg;
+if [ "${MODULES_SILENT_SHELL_DEBUG:-0}" = '1' ]; then
+case "$-" in
+*v*x*)
+set +vx;
+_mlshdbg='vx'
+;;
+*v*)
+set +v;
+_mlshdbg='v'
+;;
+*x*)
+set +x;
+_mlshdbg='x'
+;;
+*)
+_mlshdbg=''
+;;
+esac;
+fi;
+unset _mlre _mlIFS;
+if [ -n "${IFS+x}" ]; then
+_mlIFS=$IFS;
+fi;
+IFS=' ';
+for _mlv in ${MODULES_RUN_QUARANTINE:-};
+do
+if [ "${_mlv}" = "${_mlv##*[!A-Za-z0-9_]}" -a "${_mlv}" = "${_mlv#[0-9]}" ]; then
+if [ -n "`eval 'echo ${'$_mlv'+x}'`" ]; then
+_mlre="${_mlre:-}${_mlv}_modquar='`eval 'echo ${'$_mlv'}'`' ";
+fi;
+_mlrv="MODULES_RUNENV_${_mlv}";
+_mlre="${_mlre:-}${_mlv}='`eval 'echo ${'$_mlrv':-}'`' ";
+fi;
+done;
+if [ -n "${_mlre:-}" ]; then
+eval `eval ${_mlre} /usr/bin/tclsh /cm/local/apps/environment-modules/4.5.3/libexec/modulecmd.tcl bash '"$@"'`;
+else
+eval `/usr/bin/tclsh /cm/local/apps/environment-modules/4.5.3/libexec/modulecmd.tcl bash "$@"`;
+fi;
+_mlstatus=$?;
+if [ -n "${_mlIFS+x}" ]; then
+IFS=$_mlIFS;
+else
+unset IFS;
+fi;
+unset _mlre _mlv _mlrv _mlIFS;
+if [ -n "${_mlshdbg:-}" ]; then
+set -$_mlshdbg;
+fi;
+unset _mlshdbg;
+return $_mlstatus
+}
+BASH_FUNC_ml%%=() { module ml "$@"
+}
+BASH_FUNC_module%%=() { _module_raw "$@" 2>&1
+}
+BASH_FUNC_switchml%%=() { typeset swfound=1;
+if [ "${MODULES_USE_COMPAT_VERSION:-0}" = '1' ]; then
+typeset swname='main';
+if [ -e /cm/local/apps/environment-modules/4.5.3/libexec/modulecmd.tcl ]; then
+typeset swfound=0;
+unset MODULES_USE_COMPAT_VERSION;
+fi;
+else
+typeset swname='compatibility';
+if [ -e /cm/local/apps/environment-modules/4.5.3/libexec/modulecmd-compat ]; then
+typeset swfound=0;
+MODULES_USE_COMPAT_VERSION=1;
+export MODULES_USE_COMPAT_VERSION;
+fi;
+fi;
+if [ $swfound -eq 0 ]; then
+echo "Switching to Modules $swname version";
+source /cm/local/apps/environment-modules/4.5.3/init/bash;
+else
+echo "Cannot switch to Modules $swname version, command not found";
+return 1;
+fi
+}
+BROWSER=/home/yangyaodong/.vscode-server/cli/servers/Stable-e54c774e0add60467559eb0d1e229c6452cf8447/server/bin/helpers/browser.sh
+BUILD=x86_64-conda-linux-gnu
+BUNDLED_DEBUGPY_PATH=/home/yangyaodong/.vscode-server/extensions/ms-python.debugpy-2025.0.1-linux-x64/bundled/libs/debugpy
+CC=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-cc
+CC_FOR_BUILD=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-cc
+CFLAGS=-march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe -isystem /aifs4su/yaodong/miniconda3/envs/hantao_llama/include -I/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/include -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib/stubs
+CMAKE_ARGS=-DCMAKE_AR=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-ar -DCMAKE_RANLIB=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-ranlib -DCMAKE_LINKER=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-ld -DCMAKE_STRIP=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-strip -DCMAKE_BUILD_TYPE=Release
+CMAKE_PREFIX_PATH=/aifs4su/yaodong/miniconda3/envs/hantao_llama:/aifs4su/yaodong/miniconda3/envs/hantao_llama/x86_64-conda-linux-gnu/sysroot/usr
+CMD_WLM_CLUSTER_NAME=slurm
+COLORTERM=truecolor
+CONDA_BUILD_SYSROOT=/aifs4su/yaodong/miniconda3/envs/hantao_llama/x86_64-conda-linux-gnu/sysroot
+CONDA_DEFAULT_ENV=hantao_llama
+CONDA_EXE=/aifs4su/yaodong/miniconda3/bin/conda
+CONDA_PREFIX=/aifs4su/yaodong/miniconda3/envs/hantao_llama
+CONDA_PREFIX_1=/aifs4su/yaodong/miniconda3
+CONDA_PROMPT_MODIFIER=(hantao_llama)
+CONDA_PYTHON_EXE=/aifs4su/yaodong/miniconda3/bin/python
+CONDA_SHLVL=2
+CPATH=/cm/shared/apps/slurm/current/include
+CPATH_modshare=/cm/shared/apps/slurm/current/include:1
+CPP=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-cpp
+CPPFLAGS=-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem /aifs4su/yaodong/miniconda3/envs/hantao_llama/include -I/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/include -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib/stubs
+CROSS_RANK=0
+CROSS_SIZE=1
+CUDA_MODULE_LOADING=LAZY
+CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
+CXX=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-c++
+CXXFILT=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-c++filt
+CXXFLAGS=-fvisibility-inlines-hidden -std=c++17 -fmessage-length=0 -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe -isystem /aifs4su/yaodong/miniconda3/envs/hantao_llama/include -I/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/include -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib/stubs
+CXX_FOR_BUILD=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-c++
+DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/1028/bus
+DEBUGPY_ADAPTER_ENDPOINTS=/home/yangyaodong/.vscode-server/extensions/ms-python.debugpy-2025.0.1-linux-x64/.noConfigDebugAdapterEndpoints/endpoint-cf2a8fd1c0b5bb2d.txt
+DEBUG_CFLAGS=-march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-all -fno-plt -Og -g -Wall -Wextra -fvar-tracking-assignments -ffunction-sections -pipe -isystem /aifs4su/yaodong/miniconda3/envs/hantao_llama/include
+DEBUG_CPPFLAGS=-D_DEBUG -D_FORTIFY_SOURCE=2 -Og -isystem /aifs4su/yaodong/miniconda3/envs/hantao_llama/include
+DEBUG_CXXFLAGS=-fvisibility-inlines-hidden -std=c++17 -fmessage-length=0 -march=nocona -mtune=haswell -ftree-vectorize -fPIC -fstack-protector-all -fno-plt -Og -g -Wall -Wextra -fvar-tracking-assignments -ffunction-sections -pipe -isystem /aifs4su/yaodong/miniconda3/envs/hantao_llama/include
+ELFEDIT=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-elfedit
+ENABLE_LMOD=0
+GCC=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-gcc
+GCC_AR=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-gcc-ar
+GCC_NM=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-gcc-nm
+GCC_RANLIB=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-gcc-ranlib
+GIT_ASKPASS=/home/yangyaodong/.vscode-server/cli/servers/Stable-e54c774e0add60467559eb0d1e229c6452cf8447/server/extensions/git/dist/askpass.sh
+GPROF=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-gprof
+GSETTINGS_SCHEMA_DIR=/aifs4su/yaodong/miniconda3/envs/hantao_llama/share/glib-2.0/schemas
+GSETTINGS_SCHEMA_DIR_CONDA_BACKUP=
+GXX=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-g++
+HF_DATASETS_CACHE=/aifs4su/yaodong/.cache/huggingface/datasets
+HF_HOME=/aifs4su/yaodong/.cache/huggingface
+HISTTIMEFORMAT=%y/%m/%d %T
+HOME=/home/yangyaodong
+HOST=x86_64-conda-linux-gnu
+KMP_DUPLICATE_LIB_OK=True
+KMP_INIT_AT_FORK=FALSE
+LANG=C.UTF-8
+LD=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-ld
+LDFLAGS=-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections -Wl,-rpath,/aifs4su/yaodong/miniconda3/envs/hantao_llama/lib -Wl,-rpath-link,/aifs4su/yaodong/miniconda3/envs/hantao_llama/lib -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/lib -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib -L/aifs4su/yaodong/miniconda3/envs/hantao_llama/targets/x86_64-linux/lib/stubs
+LD_GOLD=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-ld.gold
+LD_LIBRARY_PATH=/aifs4su/yaodong/miniconda3/envs/hantao_llama/lib/python3.11/site-packages/cv2/../../lib64:/usr/mpi/gcc/openmpi-4.1.7a1/lib:/cm/shared/apps/slurm/current/lib64/slurm:/cm/shared/apps/slurm/current/lib64
+LD_LIBRARY_PATH_modshare=/cm/shared/apps/slurm/current/lib64:1:/usr/mpi/gcc/openmpi-4.1.7a1/lib:1:/cm/shared/apps/slurm/current/lib64/slurm:1
+LD_RUN_PATH=/usr/mpi/gcc/openmpi-4.1.7a1/lib
+LD_RUN_PATH_modshare=/usr/mpi/gcc/openmpi-4.1.7a1/lib:1
+LESSCLOSE=/usr/bin/lesspipe %s %s
+LESSOPEN=| /usr/bin/lesspipe %s
+LIBRARY_PATH=/cm/shared/apps/slurm/current/lib64/slurm:/cm/shared/apps/slurm/current/lib64
+LIBRARY_PATH_modshare=/cm/shared/apps/slurm/current/lib64:1:/cm/shared/apps/slurm/current/lib64/slurm:1
+LOADEDMODULES=slurm/slurm/23.02.6:gcc/64/4.1.7a1
+LOADEDMODULES_modshare=slurm/slurm/23.02.6:1:gcc/64/4.1.7a1:1
+LOCAL_RANK=0
+LOCAL_SIZE=8
+LOGLEVEL=WARNING
+LOGNAME=yangyaodong
+LS_COLORS=rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:mi=00:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arc=01;31:*.arj=01;31:*.taz=01;31:*.lha=01;31:*.lz4=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.tzo=01;31:*.t7z=01;31:*.zip=01;31:*.z=01;31:*.dz=01;31:*.gz=01;31:*.lrz=01;31:*.lz=01;31:*.lzo=01;31:*.xz=01;31:*.zst=01;31:*.tzst=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.wim=01;31:*.swm=01;31:*.dwm=01;31:*.esd=01;31:*.jpg=01;35:*.jpeg=01;35:*.mjpg=01;35:*.mjpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.webp=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.m4a=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.oga=00;36:*.opus=00;36:*.spx=00;36:*.xspf=00;36:
+MANPATH=/usr/mpi/gcc/openmpi-4.1.7a1/share/man:/cm/shared/apps/slurm/current/man:/cm/local/apps/environment-modules/4.5.3/share/man:/usr/local/man:/usr/local/share/man:/usr/share/man:/cm/local/apps/environment-modules/current/share/man:/cm/local/apps/environment-modules/current/share/man
+MANPATH_modshare=/usr/local/share/man:1:/usr/mpi/gcc/openmpi-4.1.7a1/share/man:1:/cm/local/apps/environment-modules/current/share/man:1:/cm/local/apps/environment-modules/4.5.3/share/man:1:/usr/local/man:1:/usr/share/man:1:/cm/shared/apps/slurm/current/man:1
+MASTER_ADDR=127.0.0.1
+MASTER_PORT=50192
+MIG_PARTED_CHECKPOINT_FILE=/var/lib/nvidia-mig-manager/checkpoint.json
+MIG_PARTED_CONFIG_FILE=/etc/nvidia-mig-manager/config.yaml
+MIG_PARTED_HOOKS_FILE=/etc/nvidia-mig-manager/hooks.yaml
+MODULEPATH=/cm/local/modulefiles:/cm/shared/modulefiles
+MODULESHOME=/cm/local/apps/environment-modules/4.5.3
+MODULES_CMD=/cm/local/apps/environment-modules/4.5.3/libexec/modulecmd.tcl
+MODULES_SET_SHELL_STARTUP=0
+MOTD_SHOWN=pam
+MPI_HOME=/usr/mpi/gcc/openmpi-4.1.7a1
+MPI_RUN=/usr/mpi/gcc/openmpi-4.1.7a1/bin/mpirun
+NM=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-nm
+NVCC_PREPEND_FLAGS= -ccbin=/aifs4su/yaodong/miniconda3/bin/x86_64-conda-linux-gnu-c++ -ccbin=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-c++
+NVCC_PREPEND_FLAGS_BACKUP= -ccbin=/aifs4su/yaodong/miniconda3/bin/x86_64-conda-linux-gnu-c++
+NVITOP_MONITOR_MODE=colorful
+OBJCOPY=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-objcopy
+OBJDUMP=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-objdump
+OLDPWD=/home/yangyaodong
+PATH=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin:/usr/lpp/mmfs/bin:/usr/local/cuda/bin:/opt/bin:/usr/lpp/mmfs/bin:/cm/shared/apps/slurm/current/sbin:/cm/shared/apps/slurm/current/bin:/usr/local/cuda/bin:/opt/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/sbin:/usr/sbin:/cm/local/apps/environment-modules/4.5.3/bin
+PATH_modshare=/usr/mpi/gcc/openmpi-4.1.7a1/bin:1:/opt/bin/:1:/usr/bin:1:/usr/local/bin:1:/cm/shared/apps/slurm/current/bin:1:/home/yangyaodong/.vscode-server/cli/servers/Stable-e54c774e0add60467559eb0d1e229c6452cf8447/server/bin/remote-cli:1:/cm/shared/apps/slurm/current/sbin:1:/bin:1:/snap/bin:1:/sbin:1:/home/yangyaodong/.vscode-server/data/User/globalStorage/github.copilot-chat/debugCommand:1:/home/yangyaodong/.vscode-server/extensions/ms-python.debugpy-2025.0.1-linux-x64/bundled/scripts/noConfigScripts:1:/usr/sbin:1:/usr/games:1:/cm/local/apps/environment-modules/4.5.3/bin:1:/usr/local/sbin:1:/usr/lpp/mmfs/bin:1:/usr/local/cuda/bin:1:/usr/local/games:1
+PWD=/aifs4su/yaodong/hantao/align-anything/scripts
+PYDEVD_DISABLE_FILE_VALIDATION=1
+PYTHONHASHSEED=42
+PYTHONPATH=/aifs4su/yaodong/hantao/align-anything/scripts
+QT_QPA_FONTDIR=/aifs4su/yaodong/miniconda3/envs/hantao_llama/lib/python3.11/site-packages/cv2/qt/fonts
+QT_QPA_PLATFORM_PLUGIN_PATH=/aifs4su/yaodong/miniconda3/envs/hantao_llama/lib/python3.11/site-packages/cv2/qt/plugins
+RANK=0
+RANLIB=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-ranlib
+READELF=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-readelf
+SHELL=/bin/bash
+SHLVL=4
+SIZE=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-size
+SLURM_CONF=/cm/shared/apps/slurm/var/etc/slurm/slurm.conf
+SSH_CLIENT=10.33.4.51 46666 22
+SSH_CONNECTION=10.33.4.230 40638 10.33.4.213 22
+SSL_CERT_DIR=/usr/lib/ssl/certs
+SSL_CERT_FILE=/usr/lib/ssl/certs/ca-certificates.crt
+STRINGS=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-strings
+STRIP=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/x86_64-conda-linux-gnu-strip
+TERM=screen
+TERM_PROGRAM=tmux
+TERM_PROGRAM_VERSION=3.2a
+TMUX=/tmp/tmux-1028/default,2296743,10
+TMUX_PANE=%25
+USER=yangyaodong
+VSCODE_GIT_ASKPASS_EXTRA_ARGS=
+VSCODE_GIT_ASKPASS_MAIN=/home/yangyaodong/.vscode-server/cli/servers/Stable-e54c774e0add60467559eb0d1e229c6452cf8447/server/extensions/git/dist/askpass-main.js
+VSCODE_GIT_ASKPASS_NODE=/home/yangyaodong/.vscode-server/cli/servers/Stable-e54c774e0add60467559eb0d1e229c6452cf8447/server/node
+VSCODE_GIT_IPC_HANDLE=/run/user/1028/vscode-git-bbbbf321f6.sock
+VSCODE_IPC_HOOK_CLI=/run/user/1028/vscode-ipc-e2edf668-dca9-4331-a6ac-7d4507f653ce.sock
+WANDB_API_KEY=7e2dcc0c310ebcb7cdcafd5e9320d6be55cf1a33
+WANDB_SERVICE=2-1109563-tcp-localhost-43957
+WORLD_SIZE=8
+XDG_DATA_DIRS=/usr/local/share:/usr/share:/var/lib/snapd/desktop
+XDG_RUNTIME_DIR=/run/user/1028
+XDG_SESSION_CLASS=user
+XDG_SESSION_ID=43255
+XDG_SESSION_TYPE=tty
+ZERO_STAGE=3
+_=/aifs4su/yaodong/miniconda3/envs/hantao_llama/bin/deepspeed
+_CE_CONDA=
+_CE_M=
+_CONDA_PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_x86_64_conda_cos7_linux_gnu
+_LMFILES_=/cm/local/modulefiles/slurm/slurm/23.02.6:/cm/local/modulefiles/gcc/64/4.1.7a1
+_LMFILES__modshare=/cm/local/modulefiles/slurm/slurm/23.02.6:1:/cm/local/modulefiles/gcc/64/4.1.7a1:1
+build_alias=x86_64-conda-linux-gnu
+host_alias=x86_64-conda-linux-gnu
script.sh
ADDED
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+#
+# Copyright 2025 PKU-Alignment Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+DATASETS_NAME=("top1-100_valid")
+
+MODEL_NAME_OR_PATH="/aifs4su/yaodong/hantao/models/llava-v1.6-mistral-7b-hf" # model path
+
+for DATASET_NAME in "${DATASETS_NAME[@]}"; do
+    TRAIN_DATASETS="/aifs4su/yaodong/hantao/datasets/MMInstruct-GPT4V_mistral-7b_cosi_cut/merged/${DATASET_NAME}" # dataset path
+    TRAIN_TEMPLATE="MM_TI2T_LLAVA" # dataset template
+    TRAIN_NAME="text-image-to-text" # dataset name
+    TRAIN_SPLIT="train" # split the dataset
+
+    OUTPUT_DIR="../outputs/LLAVA_7B_cosi/${DATASET_NAME}" # output dir
+
+    # For wandb online logging
+    export WANDB_API_KEY="7e2dcc0c310ebcb7cdcafd5e9320d6be55cf1a33"
+
+    # Source the setup script
+    source ./setup.sh
+
+    # Execute deepspeed command
+    deepspeed \
+        --master_port ${MASTER_PORT} \
+        --module align_anything.trainers.text_image_to_text.sft \
+        --model_name_or_path ${MODEL_NAME_OR_PATH} \
+        --train_datasets ${TRAIN_DATASETS} \
+        --train_template ${TRAIN_TEMPLATE} \
+        --train_split ${TRAIN_SPLIT} \
+        --train_name ${TRAIN_NAME} \
+        --output_dir ${OUTPUT_DIR} \
+        --save_total_limit 6 \
+        --train_batch_size 16 \
+        --epochs 3
+done
slice_12208/added_tokens.json
ADDED
@@ -0,0 +1,4 @@
+{
+  "<image>": 32000,
+  "<pad>": 32001
+}
slice_12208/chat_template.json
ADDED
@@ -0,0 +1,3 @@
+{
+  "chat_template": "{% for message in messages %}{% if message['role'] != 'system' %}{{ message['role'].upper() + ': '}}{% endif %}{# Render all images first #}{% for content in message['content'] | selectattr('type', 'equalto', 'image') %}{{ '<image>\n' }}{% endfor %}{# Render all text next #}{% if message['role'] != 'assistant' %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{{ content['text'] + ' '}}{% endfor %}{% else %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{% generation %}{{ content['text'] + ' '}}{% endgeneration %}{% endfor %}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'ASSISTANT:' }}{% endif %}"
+}
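Not part of the upload, but as a rough sketch of how this saved template is rendered through the transformers chat-template API (the local directory name is a placeholder for any of the slice_* folders in this commit):

from transformers import AutoProcessor

# Hypothetical local checkpoint directory containing the files listed above.
processor = AutoProcessor.from_pretrained("slice_12208")
messages = [
    {"role": "user", "content": [{"type": "image"}, {"type": "text", "text": "Describe the image."}]},
]
prompt = processor.apply_chat_template(messages, add_generation_prompt=True)
# Roughly: "USER: <image>\nDescribe the image. ASSISTANT:"
print(prompt)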
slice_12208/config.json
ADDED
@@ -0,0 +1,85 @@
+{
+  "_attn_implementation_autoset": true,
+  "architectures": [
+    "LlavaNextForConditionalGeneration"
+  ],
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "ignore_index": -100,
+  "image_grid_pinpoints": [
+    [
+      336,
+      672
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      336,
+      1008
+    ]
+  ],
+  "image_seq_length": 576,
+  "image_token_index": 32000,
+  "model_type": "llava_next",
+  "multimodal_projector_bias": true,
+  "pad_token_id": 32001,
+  "projector_hidden_act": "gelu",
+  "text_config": {
+    "_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
+    "architectures": [
+      "MistralForCausalLM"
+    ],
+    "attention_dropout": 0.0,
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 4096,
+    "initializer_range": 0.02,
+    "intermediate_size": 14336,
+    "max_position_embeddings": 32768,
+    "model_type": "mistral",
+    "num_attention_heads": 32,
+    "num_hidden_layers": 32,
+    "num_key_value_heads": 8,
+    "rms_norm_eps": 1e-05,
+    "rope_theta": 1000000.0,
+    "sliding_window": null,
+    "torch_dtype": "bfloat16",
+    "use_cache": true,
+    "vocab_size": 32064
+  },
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.0",
+  "use_image_newline_parameter": true,
+  "vision_config": {
+    "attention_dropout": 0.0,
+    "hidden_act": "quick_gelu",
+    "hidden_size": 1024,
+    "image_size": 336,
+    "initializer_factor": 1.0,
+    "initializer_range": 0.02,
+    "intermediate_size": 4096,
+    "layer_norm_eps": 1e-05,
+    "model_type": "clip_vision_model",
+    "num_attention_heads": 16,
+    "num_channels": 3,
+    "num_hidden_layers": 24,
+    "patch_size": 14,
+    "projection_dim": 768,
+    "torch_dtype": "bfloat16",
+    "vocab_size": 32000
+  },
+  "vision_feature_layer": -2,
+  "vision_feature_select_strategy": "default",
+  "vocab_size": 32064
+}
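Not part of the upload, but a minimal sketch of loading one of these slice checkpoints with the classes named in this config (the local directory name is an assumption; it only needs to contain the files listed in this commit):

import torch
from transformers import LlavaNextForConditionalGeneration, LlavaNextProcessor

# Load the sliced SFT checkpoint in bf16, matching "torch_dtype": "bfloat16" above.
model = LlavaNextForConditionalGeneration.from_pretrained(
    "slice_12208", torch_dtype=torch.bfloat16
)
processor = LlavaNextProcessor.from_pretrained("slice_12208")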
slice_12208/preprocessor_config.json
ADDED
@@ -0,0 +1,52 @@
+{
+  "aspect_ratio_setting": "anyres",
+  "crop_size": {
+    "height": 336,
+    "width": 336
+  },
+  "do_center_crop": true,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_pad": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_grid_pinpoints": [
+    [
+      336,
+      672
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      336,
+      1008
+    ]
+  ],
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "LlavaNextImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "processor_class": "LlavaNextProcessor",
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "shortest_edge": 336
+  }
+}
slice_12208/processor_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "image_token": "<image>",
+  "num_additional_image_tokens": 1,
+  "patch_size": 14,
+  "processor_class": "LlavaNextProcessor",
+  "vision_feature_select_strategy": "default"
+}
slice_12208/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e3188f9ab268711dd1ddfd113384c09efcc5afe4771ffddd5a4619b5cdc5221
+size 15133733934
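The weights are stored as a Git LFS pointer, so a plain git checkout without LFS only yields the three lines above. A minimal sketch of fetching one slice with huggingface_hub (the repo id is a placeholder; substitute the repository this commit belongs to):

from huggingface_hub import snapshot_download

# "user/repo" is hypothetical; allow_patterns limits the download to one slice.
local_dir = snapshot_download(repo_id="user/repo", allow_patterns=["slice_12208/*"])
print(local_dir)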
slice_12208/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "image_token": "<image>",
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
slice_12208/tokenizer.json
ADDED
The diff for this file is too large to render.
slice_12208/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
slice_12208/tokenizer_config.json
ADDED
@@ -0,0 +1,69 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32000": {
+      "content": "<image>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32001": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [],
+  "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {
+    "image_token": "<image>"
+  },
+  "image_token": "<image>",
+  "legacy": true,
+  "max_length": null,
+  "model_max_length": 4096,
+  "pad_to_multiple_of": null,
+  "pad_token": "<pad>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
+  "processor_class": "LlavaNextProcessor",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
slice_24416/added_tokens.json
ADDED
@@ -0,0 +1,4 @@
(identical to slice_12208/added_tokens.json)
slice_24416/chat_template.json
ADDED
@@ -0,0 +1,3 @@
(identical to slice_12208/chat_template.json)
slice_24416/config.json
ADDED
@@ -0,0 +1,85 @@
(identical to slice_12208/config.json)
slice_24416/preprocessor_config.json
ADDED
@@ -0,0 +1,52 @@
(identical to slice_12208/preprocessor_config.json)
slice_24416/processor_config.json
ADDED
@@ -0,0 +1,7 @@
(identical to slice_12208/processor_config.json)
slice_24416/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b0e08b154cf652cb73935b1ecb527309676b1c2b41788fdcc464a604134ea392
+size 15133733934
slice_24416/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
(identical to slice_12208/special_tokens_map.json)
slice_24416/tokenizer.json
ADDED
The diff for this file is too large to render.
slice_24416/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
(identical to slice_12208/tokenizer.model)
slice_24416/tokenizer_config.json
ADDED
@@ -0,0 +1,69 @@
(identical to slice_12208/tokenizer_config.json)
slice_36624/added_tokens.json
ADDED
@@ -0,0 +1,4 @@
(identical to slice_12208/added_tokens.json)
slice_36624/chat_template.json
ADDED
@@ -0,0 +1,3 @@
(identical to slice_12208/chat_template.json)
slice_36624/config.json
ADDED
@@ -0,0 +1,85 @@
(identical to slice_12208/config.json)
slice_36624/preprocessor_config.json
ADDED
@@ -0,0 +1,52 @@
(identical to slice_12208/preprocessor_config.json)
slice_36624/processor_config.json
ADDED
@@ -0,0 +1,7 @@
(identical to slice_12208/processor_config.json)
slice_36624/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d71fca0b80d0ad8f53fa03d6f5208cf203f94fbb7d9672908b0e8e02dcb4323
+size 15133733934
slice_36624/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
(identical to slice_12208/special_tokens_map.json)
slice_36624/tokenizer.json
ADDED
The diff for this file is too large to render.
slice_36624/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
(identical to slice_12208/tokenizer.model)
slice_36624/tokenizer_config.json
ADDED
@@ -0,0 +1,69 @@
(identical to slice_12208/tokenizer_config.json)
slice_48832/added_tokens.json
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"<image>": 32000,
|
3 |
+
"<pad>": 32001
|
4 |
+
}
|
slice_48832/chat_template.json
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"chat_template": "{% for message in messages %}{% if message['role'] != 'system' %}{{ message['role'].upper() + ': '}}{% endif %}{# Render all images first #}{% for content in message['content'] | selectattr('type', 'equalto', 'image') %}{{ '<image>\n' }}{% endfor %}{# Render all text next #}{% if message['role'] != 'assistant' %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{{ content['text'] + ' '}}{% endfor %}{% else %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{% generation %}{{ content['text'] + ' '}}{% endgeneration %}{% endfor %}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'ASSISTANT:' }}{% endif %}"
|
3 |
+
}
|
slice_48832/config.json
ADDED
@@ -0,0 +1,85 @@
+{
+  "_attn_implementation_autoset": true,
+  "architectures": [
+    "LlavaNextForConditionalGeneration"
+  ],
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "ignore_index": -100,
+  "image_grid_pinpoints": [
+    [
+      336,
+      672
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      336,
+      1008
+    ]
+  ],
+  "image_seq_length": 576,
+  "image_token_index": 32000,
+  "model_type": "llava_next",
+  "multimodal_projector_bias": true,
+  "pad_token_id": 32001,
+  "projector_hidden_act": "gelu",
+  "text_config": {
+    "_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
+    "architectures": [
+      "MistralForCausalLM"
+    ],
+    "attention_dropout": 0.0,
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 4096,
+    "initializer_range": 0.02,
+    "intermediate_size": 14336,
+    "max_position_embeddings": 32768,
+    "model_type": "mistral",
+    "num_attention_heads": 32,
+    "num_hidden_layers": 32,
+    "num_key_value_heads": 8,
+    "rms_norm_eps": 1e-05,
+    "rope_theta": 1000000.0,
+    "sliding_window": null,
+    "torch_dtype": "bfloat16",
+    "use_cache": true,
+    "vocab_size": 32064
+  },
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.0",
+  "use_image_newline_parameter": true,
+  "vision_config": {
+    "attention_dropout": 0.0,
+    "hidden_act": "quick_gelu",
+    "hidden_size": 1024,
+    "image_size": 336,
+    "initializer_factor": 1.0,
+    "initializer_range": 0.02,
+    "intermediate_size": 4096,
+    "layer_norm_eps": 1e-05,
+    "model_type": "clip_vision_model",
+    "num_attention_heads": 16,
+    "num_channels": 3,
+    "num_hidden_layers": 24,
+    "patch_size": 14,
+    "projection_dim": 768,
+    "torch_dtype": "bfloat16",
+    "vocab_size": 32000
+  },
+  "vision_feature_layer": -2,
+  "vision_feature_select_strategy": "default",
+  "vocab_size": 32064
+}
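The config above describes a LLaVA-NeXT model with a Mistral-7B-Instruct-v0.2 text backbone and a CLIP ViT-L/14-style vision tower at 336 px. A minimal loading sketch, assuming a slice directory is used as a local checkpoint (the path is hypothetical and not part of this upload):

# Minimal sketch (illustration only): load one saved slice for inference.
import torch
from transformers import AutoProcessor, LlavaNextForConditionalGeneration

model = LlavaNextForConditionalGeneration.from_pretrained(
    "slice_48832",               # directory with config.json + pytorch_model.bin
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
    device_map="auto",
)
processor = AutoProcessor.from_pretrained("slice_48832")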
slice_48832/preprocessor_config.json
ADDED
@@ -0,0 +1,52 @@
+{
+  "aspect_ratio_setting": "anyres",
+  "crop_size": {
+    "height": 336,
+    "width": 336
+  },
+  "do_center_crop": true,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_pad": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_grid_pinpoints": [
+    [
+      336,
+      672
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      336,
+      1008
+    ]
+  ],
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "LlavaNextImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "processor_class": "LlavaNextProcessor",
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "shortest_edge": 336
+  }
+}
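Side note (our own arithmetic, not stated in the files): with 336 px tiles and a patch size of 14, each tile yields (336 / 14)^2 = 24^2 = 576 visual tokens, which matches "image_seq_length": 576 in config.json. A quick check:

# Sanity check (illustration only) of the relation between crop size, patch size,
# and the per-tile image sequence length in the configs above.
crop = 336                # "crop_size" height/width in preprocessor_config.json
patch = 14                # "patch_size" in config.json / processor_config.json
tokens_per_tile = (crop // patch) ** 2
assert tokens_per_tile == 576   # "image_seq_length" in config.json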
slice_48832/processor_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "image_token": "<image>",
+  "num_additional_image_tokens": 1,
+  "patch_size": 14,
+  "processor_class": "LlavaNextProcessor",
+  "vision_feature_select_strategy": "default"
+}
slice_48832/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c7bc2f53054ae607fc6ed062ed63edc9549dcd56a12277d6d5a2f60cf98e8c8
+size 15133733934
slice_48832/special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "image_token": "<image>",
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
slice_48832/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
slice_48832/tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
slice_48832/tokenizer_config.json
ADDED
@@ -0,0 +1,69 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32000": {
+      "content": "<image>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32001": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [],
+  "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "extra_special_tokens": {
+    "image_token": "<image>"
+  },
+  "image_token": "<image>",
+  "legacy": true,
+  "max_length": null,
+  "model_max_length": 4096,
+  "pad_to_multiple_of": null,
+  "pad_token": "<pad>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
+  "processor_class": "LlavaNextProcessor",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
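For comparison, the tokenizer-level chat_template above renders text-only conversations in the Mistral [INST] format. A minimal sketch (illustration only; the local path and example messages are hypothetical):

# Minimal sketch (illustration only): render the tokenizer chat template above.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("slice_48832")  # hypothetical local path

messages = [
    {"role": "user", "content": "Describe the image."},
    {"role": "assistant", "content": "It shows a cat."},
    {"role": "user", "content": "What color is it?"},
]

# Expected roughly:
# "<s>[INST] Describe the image. [/INST]It shows a cat.</s>[INST] What color is it? [/INST]"
text = tokenizer.apply_chat_template(messages, tokenize=False)
print(text)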
slice_61040/added_tokens.json
ADDED
@@ -0,0 +1,4 @@
+{
+  "<image>": 32000,
+  "<pad>": 32001
+}
slice_61040/chat_template.json
ADDED
@@ -0,0 +1,3 @@
+{
+  "chat_template": "{% for message in messages %}{% if message['role'] != 'system' %}{{ message['role'].upper() + ': '}}{% endif %}{# Render all images first #}{% for content in message['content'] | selectattr('type', 'equalto', 'image') %}{{ '<image>\n' }}{% endfor %}{# Render all text next #}{% if message['role'] != 'assistant' %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{{ content['text'] + ' '}}{% endfor %}{% else %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{% generation %}{{ content['text'] + ' '}}{% endgeneration %}{% endfor %}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'ASSISTANT:' }}{% endif %}"
+}
slice_61040/config.json
ADDED
@@ -0,0 +1,85 @@
+{
+  "_attn_implementation_autoset": true,
+  "architectures": [
+    "LlavaNextForConditionalGeneration"
+  ],
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "ignore_index": -100,
+  "image_grid_pinpoints": [
+    [
+      336,
+      672
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      336,
+      1008
+    ]
+  ],
+  "image_seq_length": 576,
+  "image_token_index": 32000,
+  "model_type": "llava_next",
+  "multimodal_projector_bias": true,
+  "pad_token_id": 32001,
+  "projector_hidden_act": "gelu",
+  "text_config": {
+    "_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
+    "architectures": [
+      "MistralForCausalLM"
+    ],
+    "attention_dropout": 0.0,
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 4096,
+    "initializer_range": 0.02,
+    "intermediate_size": 14336,
+    "max_position_embeddings": 32768,
+    "model_type": "mistral",
+    "num_attention_heads": 32,
+    "num_hidden_layers": 32,
+    "num_key_value_heads": 8,
+    "rms_norm_eps": 1e-05,
+    "rope_theta": 1000000.0,
+    "sliding_window": null,
+    "torch_dtype": "bfloat16",
+    "use_cache": true,
+    "vocab_size": 32064
+  },
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.0",
+  "use_image_newline_parameter": true,
+  "vision_config": {
+    "attention_dropout": 0.0,
+    "hidden_act": "quick_gelu",
+    "hidden_size": 1024,
+    "image_size": 336,
+    "initializer_factor": 1.0,
+    "initializer_range": 0.02,
+    "intermediate_size": 4096,
+    "layer_norm_eps": 1e-05,
+    "model_type": "clip_vision_model",
+    "num_attention_heads": 16,
+    "num_channels": 3,
+    "num_hidden_layers": 24,
+    "patch_size": 14,
+    "projection_dim": 768,
+    "torch_dtype": "bfloat16",
+    "vocab_size": 32000
+  },
+  "vision_feature_layer": -2,
+  "vision_feature_select_strategy": "default",
+  "vocab_size": 32064
+}
slice_61040/preprocessor_config.json
ADDED
@@ -0,0 +1,52 @@
+{
+  "aspect_ratio_setting": "anyres",
+  "crop_size": {
+    "height": 336,
+    "width": 336
+  },
+  "do_center_crop": true,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_pad": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_grid_pinpoints": [
+    [
+      336,
+      672
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      336,
+      1008
+    ]
+  ],
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "LlavaNextImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "processor_class": "LlavaNextProcessor",
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "shortest_edge": 336
+  }
+}
slice_61040/processor_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "image_token": "<image>",
+  "num_additional_image_tokens": 1,
+  "patch_size": 14,
+  "processor_class": "LlavaNextProcessor",
+  "vision_feature_select_strategy": "default"
+}
slice_61040/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9595744c46c595c10945c3a98cdbbaec4d93aee2426da2e70482287143c244fb
+size 15133733934