Compare commits: 1204bace2c ... vim-colors

1 Commits: 4ff86c280d
@@ -4,15 +4,13 @@
 # environment variables for aichat can be set at ~/.config/aichat/.env

 #model: claude:claude-sonnet-4-5-20250929
-#model: gemini:gemini-3-flash-preview
+model: gemini:gemini-3-flash-preview
 keybindings: vi
 repl_prelude: role:cloudops
 clients:
-- type: bedrock
-name: aws
 - type: claude
 - type: gemini
-#- type: openai
+- type: openai
-#- type: openai-compatible
+- type: openai-compatible
-# name: moonshot
+name: moonshot
-# api_base: https://api.moonshot.ai/v1
+api_base: https://api.moonshot.ai/v1
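A quick sanity check after switching the default model; the aichat flags below are standard but version-dependent, and the model/role names are simply the ones from the config above:

    # confirm which default model and role the updated config resolves to
    aichat --info

    # one-off override without editing the config
    aichat --model claude:claude-sonnet-4-5-20250929 --role cloudops "ping"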
@@ -2,11 +2,19 @@ version: 0.30.0
 list:
 - provider: openai
 models:
-- name: gpt-5.2
+- name: gpt-5.1
 type: chat
 max_input_tokens: 400000
-input_price: 1.75
+input_price: 1.25
-output_price: 14.0
+output_price: 10.0
+max_output_tokens: 128000
+supports_vision: true
+supports_function_calling: true
+- name: gpt-5.1-chat-latest
+type: chat
+max_input_tokens: 400000
+input_price: 1.25
+output_price: 10.0
 max_output_tokens: 128000
 supports_vision: true
 supports_function_calling: true
@@ -18,6 +26,14 @@ list:
 max_output_tokens: 128000
 supports_vision: true
 supports_function_calling: true
+- name: gpt-5-chat-latest
+type: chat
+max_input_tokens: 400000
+input_price: 1.25
+output_price: 10.0
+max_output_tokens: 128000
+supports_vision: true
+supports_function_calling: true
 - name: gpt-5-mini
 type: chat
 max_input_tokens: 400000
@@ -42,6 +58,22 @@ list:
 max_output_tokens: 32768
 supports_vision: true
 supports_function_calling: true
+- name: gpt-4.1-mini
+type: chat
+max_input_tokens: 1047576
+input_price: 0.4
+output_price: 1.6
+max_output_tokens: 32768
+supports_vision: true
+supports_function_calling: true
+- name: gpt-4.1-nano
+type: chat
+max_input_tokens: 1047576
+input_price: 0.1
+output_price: 0.4
+max_output_tokens: 32768
+supports_vision: true
+supports_function_calling: true
 - name: gpt-4o
 type: chat
 max_input_tokens: 128000
@@ -50,6 +82,98 @@ list:
 max_output_tokens: 16384
 supports_vision: true
 supports_function_calling: true
+- name: gpt-4o-mini
+type: chat
+max_input_tokens: 128000
+input_price: 0.15
+output_price: 0.6
+max_output_tokens: 16384
+supports_vision: true
+supports_function_calling: true
+- name: o4-mini
+type: chat
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o4-mini-high
+type: chat
+real_name: o4-mini
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+reasoning_effort: high
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3
+type: chat
+max_input_tokens: 200000
+input_price: 2.0
+output_price: 8.0
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3-high
+type: chat
+real_name: o3
+max_input_tokens: 200000
+input_price: 2.0
+output_price: 8.0
+patch:
+body:
+reasoning_effort: high
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3-mini
+type: chat
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3-mini-high
+type: chat
+real_name: o3-mini
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+reasoning_effort: high
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
 - name: gpt-4-turbo
 type: chat
 max_input_tokens: 128000
@@ -79,6 +203,22 @@ list:
 max_batch_size: 100
 - provider: gemini
 models:
+- name: gemini-3-flash-preview
+type: chat
+max_input_tokens: 1048576
+input_price: 0.0
+output_price: 0.0
+max_output_tokens: 65536
+supports_vision: true
+supports_function_calling: true
+- name: gemini-3-pro-preview
+type: chat
+max_input_tokens: 1048576
+input_price: 0.0
+output_price: 0.0
+max_output_tokens: 65536
+supports_vision: true
+supports_function_calling: true
 - name: gemini-2.5-flash
 type: chat
 max_input_tokens: 1048576
@@ -103,16 +243,6 @@ list:
 max_output_tokens: 64000
 supports_vision: true
 supports_function_calling: true
-- name: gemini-3-pro-preview
-type: chat
-max_input_tokens: 1048576
-supports_vision: true
-supports_function_calling: true
-- name: gemini-3-flash-preview
-type: chat
-max_input_tokens: 1048576
-supports_vision: true
-supports_function_calling: true
 - name: gemini-2.0-flash
 type: chat
 max_input_tokens: 1048576
@@ -309,13 +439,6 @@ list:
 supports_function_calling: true
 - provider: mistral
 models:
-- name: mistral-large-latest
-type: chat
-input_price: 0.5
-output_price: 1.5
-max_output_tokens: 262144
-supports_vision: true
-supports_function_calling: true
 - name: mistral-medium-latest
 type: chat
 max_input_tokens: 131072
@@ -332,38 +455,32 @@ list:
 supports_function_calling: true
 - name: magistral-medium-latest
 type: chat
-max_input_tokens: 131072
+max_input_tokens: 40960
 input_price: 2.0
 output_price: 5.0
 - name: magistral-small-latest
 type: chat
-max_input_tokens: 131072
+max_input_tokens: 40960
 input_price: 0.5
 output_price: 1.5
 - name: devstral-medium-latest
 type: chat
-max_input_tokens: 262144
+max_input_tokens: 256000
 input_price: 0.4
 output_price: 2.0
 supports_function_calling: true
 - name: devstral-small-latest
 type: chat
-max_input_tokens: 262144
+max_input_tokens: 256000
 input_price: 0.1
 output_price: 0.3
 supports_function_calling: true
 - name: codestral-latest
 type: chat
-max_input_tokens: 262144
+max_input_tokens: 256000
 input_price: 0.3
 output_price: 0.9
 supports_function_calling: true
-- name: ministral-14b-latest
-type: chat
-max_input_tokens: 262144
-input_price: 0.2
-output_price: 0.2
-supports_function_calling: true
 - name: mistral-embed
 type: embedding
 max_input_tokens: 8092
@@ -441,24 +558,42 @@ list:
 max_input_tokens: 4096
 - provider: xai
 models:
-- name: grok-4-1-fast-non-reasoning
+- name: grok-4
+type: chat
+max_input_tokens: 256000
+input_price: 3.0
+output_price: 15.0
+supports_function_calling: true
+- name: grok-4-fast-non-reasoning
 type: chat
 max_input_tokens: 2000000
 input_price: 0.2
 output_price: 0.5
 supports_function_calling: true
-- name: grok-4-1-fast-reasoning
+- name: grok-4-fast-reasoning
 type: chat
 max_input_tokens: 2000000
 input_price: 0.2
 output_price: 0.5
 supports_function_calling: true
-- name: grok-code-fast-1
+- name: grok-code-fast
 type: chat
 max_input_tokens: 256000
 input_price: 0.2
 output_price: 1.5
 supports_function_calling: true
+- name: grok-3
+type: chat
+max_input_tokens: 131072
+input_price: 3.0
+output_price: 15.0
+supports_function_calling: true
+- name: grok-3-mini
+type: chat
+max_input_tokens: 131072
+input_price: 0.3
+output_price: 0.5
+supports_function_calling: true
 - provider: perplexity
 models:
 - name: sonar-pro
@@ -476,6 +611,11 @@ list:
 max_input_tokens: 128000
 input_price: 2.0
 output_price: 8.0
+- name: sonar-reasoning
+type: chat
+max_input_tokens: 128000
+input_price: 1.0
+output_price: 5.0
 - name: sonar-deep-research
 type: chat
 max_input_tokens: 128000
@@ -562,16 +702,6 @@ list:
 max_output_tokens: 65536
 supports_vision: true
 supports_function_calling: true
-- name: gemini-3-pro-preview
-type: chat
-max_input_tokens: 1048576
-supports_vision: true
-supports_function_calling: true
-- name: gemini-3-flash-preview
-type: chat
-max_input_tokens: 1048576
-supports_vision: true
-supports_function_calling: true
 - name: gemini-2.0-flash-001
 type: chat
 max_input_tokens: 1048576
@@ -747,6 +877,18 @@ list:
 require_max_tokens: true
 supports_vision: true
 supports_function_calling: true
+- name: mistral-small-2503
+type: chat
+max_input_tokens: 32000
+input_price: 0.1
+output_price: 0.3
+supports_function_calling: true
+- name: codestral-2501
+type: chat
+max_input_tokens: 256000
+input_price: 0.3
+output_price: 0.9
+supports_function_calling: true
 - name: text-embedding-005
 type: embedding
 max_input_tokens: 20000
@@ -1194,23 +1336,32 @@ list:
 max_batch_size: 10
 - provider: hunyuan
 models:
-- name: hunyuan-2.0-instruct-20251111
+- name: hunyuan-turbos-latest
 type: chat
-max_input_tokens: 131072
+max_input_tokens: 28000
 input_price: 0.112
 output_price: 0.28
 supports_function_calling: true
-- name: hunyuan-2.0-thinking-20251109
+- name: hunyuan-t1-latest
 type: chat
-max_input_tokens: 131072
+max_input_tokens: 28000
 input_price: 0.14
 output_price: 0.56
-supports_function_calling: true
+- name: hunyuan-lite
-- name: hunyuan-vision-1.5-instruct
 type: chat
-max_input_tokens: 24576
+max_input_tokens: 250000
+input_price: 0.0
+output_price: 0.0
+supports_function_calling: true
+- name: hunyuan-turbos-vision
+type: chat
+max_input_tokens: 6144
 input_price: 0.42
-output_price: 1.26
+output_price: 0.84
+supports_vision: true
+- name: hunyuan-t1-vision
+type: chat
+max_input_tokens: 24000
 supports_vision: true
 - name: hunyuan-embedding
 type: embedding
@@ -1263,29 +1414,65 @@ list:
 max_output_tokens: 32768
 - provider: zhipuai
 models:
-- name: glm-4.7
+- name: glm-4.6
 type: chat
 max_input_tokens: 202752
+input_price: 0.28
+output_price: 1.12
+supports_function_calling: true
+- name: glm-4.5
+type: chat
+max_input_tokens: 131072
+input_price: 0.28
+output_price: 1.12
+- name: glm-4.5-x
+type: chat
+max_input_tokens: 131072
+input_price: 1.12
+output_price: 4.48
+supports_function_calling: true
+- name: glm-4.5-air
+type: chat
+max_input_tokens: 131072
+input_price: 0.084
+output_price: 0.56
+- name: glm-4.5-airx
+type: chat
+max_input_tokens: 131072
 input_price: 0.56
 output_price: 2.24
 supports_function_calling: true
-- name: glm-4.7:instruct
+- name: glm-4.5-flash
 type: chat
-real_name: glm-4.7
+max_input_tokens: 131072
-max_input_tokens: 202752
+input_price: 0.0
-input_price: 0.56
+output_price: 0.0
-output_price: 2.24
+- name: glm-4.5v
-patch:
-body:
-thinking:
-type: disabled
-supports_function_calling: true
-- name: glm-4.6v
 type: chat
 max_input_tokens: 65536
-input_price: 0.28
+input_price: 0.56
-output_price: 0.84
+output_price: 1.68
 supports_vision: true
+- name: glm-z1-air
+type: chat
+max_input_tokens: 131072
+input_price: 0.07
+output_price: 0.07
+- name: glm-z1-airx
+type: chat
+max_input_tokens: 131072
+input_price: 0.7
+output_price: 0.7
+- name: glm-z1-flashx
+type: chat
+max_input_tokens: 131072
+input_price: 0.014
+output_price: 0.014
+- name: glm-z1-flash
+type: chat
+max_input_tokens: 131072
+input_price: 0.0
+output_price: 0.0
 - name: embedding-3
 type: embedding
 max_input_tokens: 8192
@@ -1298,25 +1485,27 @@ list:
 input_price: 0.112
 - provider: minimax
 models:
-- name: minimax-m2.1
+- name: minimax-m2
 type: chat
 max_input_tokens: 204800
 input_price: 0.294
 output_price: 1.176
 supports_function_calling: true
-- name: minimax-m2.1-lightning
-type: chat
-max_input_tokens: 204800
-input_price: 0.294
-output_price: 2.352
-supports_function_calling: true
 - provider: openrouter
 models:
-- name: openai/gpt-5.2
+- name: openai/gpt-5.1
 type: chat
 max_input_tokens: 400000
-input_price: 1.75
+input_price: 1.25
-output_price: 14.0
+output_price: 10.0
+max_output_tokens: 128000
+supports_vision: true
+supports_function_calling: true
+- name: openai/gpt-5.1-chat
+type: chat
+max_input_tokens: 400000
+input_price: 1.25
+output_price: 10.0
 max_output_tokens: 128000
 supports_vision: true
 supports_function_calling: true
@@ -1328,6 +1517,14 @@ list:
 max_output_tokens: 128000
 supports_vision: true
 supports_function_calling: true
+- name: openai/gpt-5-chat
+type: chat
+max_input_tokens: 400000
+input_price: 1.25
+output_price: 10.0
+max_output_tokens: 128000
+supports_vision: true
+supports_function_calling: true
 - name: openai/gpt-5-mini
 type: chat
 max_input_tokens: 400000
@@ -1352,6 +1549,22 @@ list:
 max_output_tokens: 32768
 supports_vision: true
 supports_function_calling: true
+- name: openai/gpt-4.1-mini
+type: chat
+max_input_tokens: 1047576
+input_price: 0.4
+output_price: 1.6
+max_output_tokens: 32768
+supports_vision: true
+supports_function_calling: true
+- name: openai/gpt-4.1-nano
+type: chat
+max_input_tokens: 1047576
+input_price: 0.1
+output_price: 0.4
+max_output_tokens: 32768
+supports_vision: true
+supports_function_calling: true
 - name: openai/gpt-4o
 type: chat
 max_input_tokens: 128000
@@ -1359,6 +1572,91 @@ list:
 output_price: 10.0
 supports_vision: true
 supports_function_calling: true
+- name: openai/gpt-4o-mini
+type: chat
+max_input_tokens: 128000
+input_price: 0.15
+output_price: 0.6
+supports_vision: true
+supports_function_calling: true
+- name: openai/o4-mini
+type: chat
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: openai/o4-mini-high
+type: chat
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+reasoning_effort: high
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: openai/o3
+type: chat
+max_input_tokens: 200000
+input_price: 2.0
+output_price: 8.0
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: openai/o3-high
+type: chat
+real_name: openai/o3
+max_input_tokens: 200000
+input_price: 2.0
+output_price: 8.0
+patch:
+body:
+reasoning_effort: high
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: openai/o3-mini
+type: chat
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: openai/o3-mini-high
+type: chat
+max_input_tokens: 200000
+input_price: 1.1
+output_price: 4.4
+patch:
+body:
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
 - name: openai/gpt-oss-120b
 type: chat
 max_input_tokens: 131072
@@ -1492,12 +1790,6 @@ list:
 max_input_tokens: 131072
 input_price: 0.12
 output_price: 0.3
-- name: mistralai/mistral-large-2512
-type: chat
-max_input_tokens: 262144
-input_price: 0.5
-output_price: 1.5
-supports_function_calling: true
 - name: mistralai/mistral-medium-3.1
 type: chat
 max_input_tokens: 131072
@@ -1511,11 +1803,26 @@ list:
 input_price: 0.1
 output_price: 0.3
 supports_vision: true
-- name: mistralai/devstral-2512
+- name: mistralai/magistral-medium-2506
 type: chat
-max_input_tokens: 262144
+max_input_tokens: 40960
+input_price: 2.0
+output_price: 5.0
+- name: mistralai/magistral-medium-2506:thinking
+type: chat
+max_input_tokens: 40960
+input_price: 2.0
+output_price: 5.0
+- name: mistralai/magistral-small-2506
+type: chat
+max_input_tokens: 40960
 input_price: 0.5
-output_price: 0.22
+output_price: 1.5
+- name: mistralai/devstral-medium
+type: chat
+max_input_tokens: 131072
+input_price: 0.4
+output_price: 2.0
 supports_function_calling: true
 - name: mistralai/devstral-small
 type: chat
@@ -1529,12 +1836,6 @@ list:
 input_price: 0.3
 output_price: 0.9
 supports_function_calling: true
-- name: mistralai/ministral-14b-2512
-type: chat
-max_input_tokens: 262144
-input_price: 0.2
-output_price: 0.2
-supports_function_calling: true
 - name: ai21/jamba-large-1.7
 type: chat
 max_input_tokens: 256000
@@ -1559,11 +1860,29 @@ list:
 input_price: 0.0375
 output_price: 0.15
 max_output_tokens: 4096
-- name: deepseek/deepseek-v3.2
+- name: deepseek/deepseek-v3.2-exp
 type: chat
 max_input_tokens: 163840
-input_price: 0.25
+input_price: 0.27
-output_price: 0.38
+output_price: 0.4
+- name: deepseek/deepseek-v3.1-terminus
+type: chat
+max_input_tokens: 163840
+input_price: 0.23
+output_price: 0.9
+- name: deepseek/deepseek-chat-v3.1
+type: chat
+max_input_tokens: 163840
+input_price: 0.2
+output_price: 0.8
+- name: deepseek/deepseek-r1-0528
+type: chat
+max_input_tokens: 128000
+input_price: 0.5
+output_price: 2.15
+patch:
+body:
+include_reasoning: true
 - name: qwen/qwen3-max
 type: chat
 max_input_tokens: 262144
@@ -1663,7 +1982,13 @@ list:
 input_price: 0.29
 output_price: 1.15
 supports_function_calling: true
-- name: x-ai/grok-4.1-fast
+- name: x-ai/grok-4
+type: chat
+max_input_tokens: 256000
+input_price: 3.0
+output_price: 15.0
+supports_function_calling: true
+- name: x-ai/grok-4-fast
 type: chat
 max_input_tokens: 2000000
 input_price: 0.2
@@ -1719,6 +2044,14 @@ list:
 patch:
 body:
 include_reasoning: true
+- name: perplexity/sonar-reasoning
+type: chat
+max_input_tokens: 127000
+input_price: 1.0
+output_price: 5.0
+patch:
+body:
+include_reasoning: true
 - name: perplexity/sonar-deep-research
 type: chat
 max_input_tokens: 200000
@@ -1727,24 +2060,17 @@ list:
 patch:
 body:
 include_reasoning: true
-- name: minimax/minimax-m2.1
+- name: minimax/minimax-m2
 type: chat
 max_input_tokens: 196608
-input_price: 0.12
+input_price: 0.15
-output_price: 0.48
+output_price: 0.45
-supports_function_calling: true
+- name: z-ai/glm-4.6
-- name: z-ai/glm-4.7
 type: chat
 max_input_tokens: 202752
-input_price: 0.16
+input_price: 0.5
-output_price: 0.8
+output_price: 1.75
 supports_function_calling: true
-- name: z-ai/glm-4.6v
-type: chat
-max_input_tokens: 131072
-input_price: 0.3
-output_price: 0.9
-supports_vision: true
 - provider: github
 models:
 - name: gpt-5
@@ -1753,6 +2079,12 @@ list:
 max_output_tokens: 128000
 supports_vision: true
 supports_function_calling: true
+- name: gpt-5-chat
+type: chat
+max_input_tokens: 400000
+max_output_tokens: 128000
+supports_vision: true
+supports_function_calling: true
 - name: gpt-5-mini
 type: chat
 max_input_tokens: 400000
@@ -1771,11 +2103,100 @@ list:
 max_output_tokens: 32768
 supports_vision: true
 supports_function_calling: true
+- name: gpt-4.1-mini
+type: chat
+max_input_tokens: 1047576
+max_output_tokens: 32768
+supports_vision: true
+supports_function_calling: true
+- name: gpt-4.1-nano
+type: chat
+max_input_tokens: 1047576
+max_output_tokens: 32768
+supports_vision: true
+supports_function_calling: true
 - name: gpt-4o
 type: chat
 max_input_tokens: 128000
 max_output_tokens: 16384
 supports_function_calling: true
+- name: gpt-4o-mini
+type: chat
+max_input_tokens: 128000
+max_output_tokens: 16384
+supports_function_calling: true
+- name: o4-mini
+type: chat
+max_input_tokens: 200000
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o4-mini-high
+type: chat
+real_name: o4-mini
+max_input_tokens: 200000
+patch:
+body:
+reasoning_effort: high
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3
+type: chat
+max_input_tokens: 200000
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3-high
+type: chat
+real_name: o3
+max_input_tokens: 200000
+patch:
+body:
+reasoning_effort: high
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3-mini
+type: chat
+max_input_tokens: 200000
+patch:
+body:
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
+- name: o3-mini-high
+type: chat
+real_name: o3-mini
+max_input_tokens: 200000
+patch:
+body:
+reasoning_effort: high
+max_tokens: null
+temperature: null
+top_p: null
+supports_vision: true
+supports_function_calling: true
+system_prompt_prefix: Formatting re-enabled
 - name: text-embedding-3-large
 type: embedding
 max_tokens_per_chunk: 8191
@@ -1917,12 +2338,26 @@ list:
 input_price: 0.18
 output_price: 0.69
 supports_vision: true
-- name: deepseek-ai/DeepSeek-V3.2
+- name: deepseek-ai/DeepSeek-V3.2-Exp
 type: chat
 max_input_tokens: 163840
-input_price: 0.26
+input_price: 0.27
-output_price: 0.39
+output_price: 0.4
-supports_function_calling: true
+- name: deepseek-ai/DeepSeek-V3.1-Terminus
+type: chat
+max_input_tokens: 163840
+input_price: 0.27
+output_price: 1.0
+- name: deepseek-ai/DeepSeek-V3.1
+type: chat
+max_input_tokens: 163840
+input_price: 0.3
+output_price: 1.0
+- name: deepseek-ai/DeepSeek-R1-0528
+type: chat
+max_input_tokens: 163840
+input_price: 0.5
+output_price: 2.15
 - name: google/gemma-3-27b-it
 type: chat
 max_input_tokens: 131072
@@ -1945,24 +2380,12 @@ list:
 input_price: 0.55
 output_price: 2.5
 supports_function_calling: true
-- name: MiniMaxAI/MiniMax-M2.1
+- name: zai-org/GLM-4.6
-type: chat
-max_input_tokens: 262144
-input_price: 0.28
-output_price: 1.2
-supports_function_calling: true
-- name: zai-org/GLM-4.7
 type: chat
 max_input_tokens: 202752
-input_price: 0.43
+input_price: 0.6
-output_price: 1.75
+output_price: 1.9
 supports_function_calling: true
-- name: zai-org/GLM-4.6V
-type: chat
-max_input_tokens: 131072
-input_price: 0.3
-output_price: 0.9
-supports_vision: true
 - name: BAAI/bge-large-en-v1.5
 type: embedding
 input_price: 0.01
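Since the models list above is hand-edited YAML, a minimal validation pass is cheap; the path models.yaml below is a placeholder for wherever this file lives in the repo:

    # parse check, then a quick grep for a couple of the renamed entries
    python3 -c 'import yaml,sys; yaml.safe_load(open(sys.argv[1]))' models.yaml
    grep -n 'name: gpt-5.1\|name: grok-4-fast' models.yaml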
@@ -1,4 +1,15 @@
-# Source library files in alphabetical order
-for lib in ~/.config/direnv/lib/*.sh; do
-[[ -f "$lib" ]] && source "$lib"
-done
+export_secret() {
+local name="$1"
+local file="/run/secrets/${name}"
+
+if [[ ! -f "$file" ]]; then
+printf "secret file not found: %s\n" $file >&2
+return 1
+fi
+
+# Read first line, remove trailing spaces/tabs, convert nulls to newlines
+local value
+value=$(head -n1 "$file" | sed 's/[ \t]*$//' | tr '\0' '\n')
+
+export "${name}=${value}"
+}
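A minimal usage sketch for the new export_secret helper, assuming direnv loads this library and a container secret has been mounted under /run/secrets; the secret name is only an example:

    # .envrc
    export_secret OPENAI_API_KEY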
@@ -1,16 +0,0 @@
-# Exports a Podman / Docker secret as an environment variable
-export-secret() {
-local name="$1"
-local file="/run/secrets/${name}"
-
-if [[ ! -f "$file" ]]; then
-printf "secret file not found: %s\n" $file >&2
-return 1
-fi
-
-# Read first line, remove trailing spaces/tabs, convert nulls to newlines
-local value
-value=$(head -n1 "$file" | sed 's/[ \t]*$//' | tr '\0' '\n')
-
-export "${name}=${value}"
-}
@@ -1,73 +0,0 @@
-#!/usr/bin/env bash
-
-# repo_overlay - a direnv function to copy files from centralized overlays directory
-#
-# Set REPO_OVERLAY_DIR to customize the overlays base directory (default: $HOME/overlays)
-#
-# Usage: Add 'repo_overlay' to your .envrc file
-
-repo-overlay() {
-# Get the repo URL from git remote
-local repo_url
-repo_url=$(git config --get remote.origin.url 2>/dev/null)
-
-if [ -z "$repo_url" ]; then
-echo "repo_overlay: Not a git repository or no origin remote configured"
-return 1
-fi
-
-# Extract owner and repo name from URL
-# Handles both SSH (git@github.com:owner/repo.git) and HTTPS (https://github.com/owner/repo.git)
-local owner_repo
-if [[ "$repo_url" =~ ^git@[^:]+:(.+)\.git$ ]] || \
-[[ "$repo_url" =~ ^https?://[^/]+/(.+)\.git$ ]] || \
-[[ "$repo_url" =~ ^git@[^:]+:(.+)$ ]] || \
-[[ "$repo_url" =~ ^https?://[^/]+/(.+)$ ]]; then
-owner_repo="${BASH_REMATCH[1]}"
-else
-echo "repo_overlay: Could not parse repository URL: $repo_url"
-return 1
-fi
-
-# Determine overlay source directory
-local overlay_base="${REPO_OVERLAY_DIR:-$HOME/overlays}"
-local overlay_source="${overlay_base}/${owner_repo}"
-
-if [ ! -d "$overlay_source" ]; then
-echo "repo_overlay: No overlay directory found at ${overlay_source}"
-return 0
-fi
-
-# Copy files from overlay directory
-local copied_count=0
-local skipped_count=0
-
-# Find all files in overlay directory (excluding directories)
-while IFS= read -r -d '' source_file; do
-# Get relative path from overlay source
-local rel_path="${source_file#$overlay_source/}"
-local dest_file="./${rel_path}"
-
-if [ -f "$dest_file" ]; then
-echo "repo_overlay: Skipping ${rel_path} (already exists)"
-((skipped_count++))
-else
-# Create parent directory if needed
-local dest_dir
-dest_dir=$(dirname "$dest_file")
-mkdir -p "$dest_dir"
-
-# Copy the file
-cp "$source_file" "$dest_file"
-echo "repo_overlay: Copied ${rel_path}"
-((copied_count++))
-fi
-done < <(find "$overlay_source" -type f -print0)
-
-# Summary
-if [ $copied_count -eq 0 ] && [ $skipped_count -eq 0 ]; then
-echo "repo_overlay: No files found in ${overlay_source}"
-else
-echo "repo_overlay: Complete (${copied_count} copied, ${skipped_count} skipped)"
-fi
-}
@@ -1,12 +1,12 @@
 # local network -> local port
-Match Host tuulikki.scalene.net exec "nc -w 1 192.168.101.1 22"
+Match Host tuulikki.scalene.net exec "sudo ping -c 1 -W 1 -q 192.168.101.1"
 Port 22

 Host tuulikki.scalene.net
 Port 22022

 # local network -> local port
-Match Host tonttu.scalene.net exec "nc -w 1 192.168.101.1 22"
+Match Host tonttu.scalene.net exec "sudo ping -c 1 -W 1 -q 192.168.101.1"
 Port 22

 Host tonttu.scalene.net
@@ -16,7 +16,7 @@ Host www.scalene.net
 User scalene
 Port 22022

-Match Host git.scalene.net exec "nc -w 1 192.168.101.1 22"
+Match Host git.scalene.net exec "sudo ping -c 1 -W 1 -q 192.168.101.1"
 Hostname tonttu.scalene.net

 Host *.scalene.net
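To see which Port and Hostname the rewritten Match rules actually resolve to, ssh -G is handy (host names taken from the config above); note the new exec test shells out to sudo ping, which presumably relies on passwordless sudo for ping to avoid prompting:

    # print the resolved client configuration without connecting
    ssh -G tuulikki.scalene.net | grep -Ei '^(hostname|port) '
    ssh -G git.scalene.net | grep -Ei '^(hostname|port) '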
@@ -69,9 +69,6 @@ au FileType markdown setlocal tabstop=4 expandtab shiftwidth=4 softtabstop=4 wra
 " typing idate inserts the current date
 iab idate <c-r>=strftime("%Y-%m-%d")<cr>

-" insert triple backticks for markdown code blocks with Ctrl-b
-inoremap <C-b> ```<CR>```<Esc>O
-
 " use \ toc to insert a table of contents for markdown from the current line to EOF
 " nmap <leader>toc <cmd>.put =execute('.,$write !gh-md-toc --hide-header --hide-footer')<cr>
 nmap <leader>toc :put =system('gh-md-toc --hide-header --hide-footer --indent=4', getline('.', '$'))<cr>
@@ -1,7 +1,6 @@
 # {{format-date now '%Y-%m-%d'}}

-[Yesterday](daily/{{format-date (date "yesterday") '%Y%m%d'}}.md)
+[yesterday](daily/{{format-date (date "yesterday") '%Y%m%d'}}.md)
-[Weekly Goals](daily/weekly-goals.md)

 ## goals
 - a bullet
@@ -1,17 +0,0 @@
-aws-get-bedrock-profile () {
-profile_arn=$(aws bedrock list-inference-profiles \
-| jq -r '.inferenceProfileSummaries
-| map(select(.inferenceProfileId | startswith("us.")))
-| sort_by(.inferenceProfileName)
-| .[]
-| [.inferenceProfileArn, .inferenceProfileName, .description]
-| @tsv' \
-| fzf \
---header="$(printf '%-30s\t%s\n' 'NAME' 'DESCRIPTION')" \
---no-preview \
---delimiter='\t' \
---with-nth=2,3 \
-| awk '{print $1}')
-[[ -z "$profile_arn" ]] && return 1
-echo "$profile_arn"
-}
@@ -1,3 +1,3 @@
 aws-logout() {
-unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN AWS_ENVIRONMENT AWS_PROFILE
+unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN AWS_ENVIRONMENT
 }
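With AWS_PROFILE dropped from the unset list, aws-logout now clears only the session credentials and the environment tag, so the selected profile survives a logout; a quick check using the variable names from the function above:

    aws-logout
    env | grep '^AWS_' || echo "no AWS_* variables set"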
@@ -1,30 +0,0 @@
-aws-sso-login() {
-local profile="${1:-$AWS_PROFILE}"
-
-if [[ -z "$profile" ]]; then
-profile=$(aws configure list-profiles \
-| fzf --height 40% --reverse --no-preview)
-fi
-
-if [[ -z "$profile" ]]; then
-echo "Error: No profile provided or selected." >&2
-return 1
-fi
-
-# Validate existing session; login if expired or missing
-if ! aws sts get-caller-identity --profile "$profile" &>/dev/null; then
-aws sso login --use-device-code --profile "$profile" || return 1
-fi
-
-# Export credentials to environment for third-party tools
-local creds
-creds=$(aws configure export-credentials --profile "$profile" --format env)
-if [[ $? -eq 0 ]]; then
-eval "$creds"
-export AWS_PROFILE="$profile"
-# echo "Environment configured for profile: $profile"
-else
-echo "Error: Failed to export credentials." >&2
-return 1
-fi
-}
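With the aws-sso-login wrapper removed, the same flow can be driven directly with the AWS CLI commands it wrapped (the profile name below is a placeholder):

    aws sso login --use-device-code --profile my-profile
    eval "$(aws configure export-credentials --profile my-profile --format env)"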
@@ -48,9 +48,9 @@ zstyle ':vcs_info:git*' actionformats '%%S%r%%s:%{$fg[red]%}%b:%a%{$reset_color%

 function set_prompt() {

-if [[ -n "$AWS_PROFILE" ]]; then
+if [[ -n "$AWS_ENVIRONMENT" ]]; then
 aws_prompt="
-%S%B$AWS_PROFILE%b%s
+%S%B$AWS_ENVIRONMENT%b%s
 "
 else
 aws_prompt=""
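The prompt segment now keys off AWS_ENVIRONMENT instead of AWS_PROFILE, so something else has to set that variable for the segment to appear; for example (the value is hypothetical):

    export AWS_ENVIRONMENT=staging
    # start a new shell or re-run the prompt hook to see the %S%B...%b%s segment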