Models | Chat | APIs (FREE!) | Github
A.X 4.0 Family Highlights
SK Telecom released A.X 4.0 (pronounced "A dot X"), a large language model (LLM) optimized for Korean-language understanding and enterprise deployment, on July 3, 2025. Built on the open-source Qwen2.5 model, A.X 4.0 has been further trained with large-scale Korean datasets to deliver outstanding performance in real-world business environments.
transformers>=4.46.0 or the latest version is required to use skt/A.X-4.0.
pip install "transformers>=4.46.0"
Example Usage
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "skt/A.X-4.0"
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
model.eval()
tokenizer = AutoTokenizer.from_pretrained(model_name)

# System prompt: "You are an AI expert that translates English sentences provided by the user into Korean."
messages = [
    {"role": "system", "content": "당신은 사용자가 제공하는 영어 문장들을 한국어로 번역하는 AI 전문가입니다."},
    {"role": "user", "content": "The first human went into space and orbited the Earth on April 12, 1961."},
]
input_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt").to(model.device)

with torch.no_grad():
    output = model.generate(
        input_ids,
        max_new_tokens=128,
        do_sample=False,
    )

# Decode only the newly generated tokens, skipping the prompt.
len_input_prompt = len(input_ids[0])
response = tokenizer.decode(output[0][len_input_prompt:], skip_special_tokens=True)
print(response)
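For interactive use, the generated text can also be streamed token by token. Below is a minimal sketch (an addition, not part of the original example) using the TextStreamer utility from transformers; it assumes the model, tokenizer, and input_ids defined above.

from transformers import TextStreamer

# Stream decoded tokens to stdout as they are generated.
# Assumes model, tokenizer, and input_ids from the example above.
streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
with torch.no_grad():
    model.generate(
        input_ids,
        streamer=streamer,
        max_new_tokens=128,
        do_sample=False,
    )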
with vLLM
vllm>=v0.6.4.post1 or the latest version is required to use the tool-use function.
pip install "vllm>=v0.6.4.post1"
VLLM_OPTION="--enable-auto-tool-choice --tool-call-parser hermes"
vllm serve skt/A.X-4.0 $VLLM_OPTION
Example Usage
from openai import OpenAI

def call(messages, model):
    completion = client.chat.completions.create(
        model=model,
        messages=messages,
    )
    print(completion.choices[0].message)

client = OpenAI(
    base_url="http://localhost:8000/v1",
    api_key="api_key"
)
model = "skt/A.X-4.0"
messages = [{"role": "user", "content": "μμ΄μ»¨ μ¬λ¦μ² μ μ μ¨λλ? νμ€λ‘ λ΅λ³ν΄μ€"}]
call(messages, model)
messages = [{"role": "user", "content": "What is the appropriate temperature for air conditioning in summer? Response in a single sentence."}]
call(messages, model)
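Because the vLLM server exposes an OpenAI-compatible API, responses can also be streamed. A minimal sketch (an addition, assuming the client and model objects created above); it reuses the English prompt from the previous example.

# Streamed chat completion against the same vLLM server; prints content deltas as they arrive.
stream = client.chat.completions.create(
    model=model,
    messages=[{"role": "user", "content": "What is the appropriate temperature for air conditioning in summer? Response in a single sentence."}],
    stream=True,
)
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)
print()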
Examples for tool-use
from openai import OpenAI

def call(messages, model):
    completion = client.chat.completions.create(
        model=model,
        messages=messages,
        tools=tools
    )
    print(completion.choices[0].message)

client = OpenAI(
    base_url="http://localhost:8000/v1",
    api_key="api_key"
)
model = "skt/A.X-4.0"
calculate_discount = {
    "type": "function",
    "function": {
        "name": "calculate_discount",
        # "Takes an original price and a discount rate (in percent) and computes the discounted price."
        "description": "원가격과 할인율(퍼센트 단위)을 입력받아 할인된 가격을 계산한다.",
        "parameters": {
            "type": "object",
            "properties": {
                "original_price": {
                    "type": "number",
                    "description": "상품의 원래 가격"  # "The original price of the product."
                },
                "discount_percentage": {
                    "type": "number",
                    "description": "적용할 할인율(예: 20% 할인의 경우 20을 입력)"  # "The discount rate to apply (e.g., enter 20 for a 20% discount)."
                }
            },
            "required": ["original_price", "discount_percentage"]
        }
    }
}
get_exchange_rate = {
    "type": "function",
    "function": {
        "name": "get_exchange_rate",
        "description": "두 통화 간의 환율을 가져온다.",  # "Gets the exchange rate between two currencies."
        "parameters": {
            "type": "object",
            "properties": {
                "base_currency": {
                    "type": "string",
                    "description": "The currency to convert from."
                },
                "target_currency": {
                    "type": "string",
                    "description": "The currency to convert to."
                }
            },
            "required": ["base_currency", "target_currency"]
        }
    }
}
tools = [calculate_discount, get_exchange_rate]
messages = [{"role": "user", "content": "μ°λ¦¬κ° λ μ¬μΌλλλ° μλ 57600μμΈλ° μ§μν μΈ λ°μ μ μκ±°λ ? ν μΈκ°μ’ κ³μ°ν΄μ€"}]
call(messages, model)
messages = [
    {"role": "user", "content": "우리가 뭘 사야되는데 원래 57600원인데 지금 할인 받을 수 있거든? 할인가좀 계산해줘"},
    {"role": "assistant", "content": "지금 할인을 몇 퍼센트 받을 수 있는지 알려주시겠어요?"},  # "Could you tell me what percentage discount you can get right now?"
    {"role": "user", "content": "15% 할인 받을 수 있어."},  # "I can get a 15% discount."
]
call(messages, model)
messages = [
    {"role": "user", "content": "우리가 뭘 사야되는데 원래 57600원인데 지금 할인 받을 수 있거든? 할인가좀 계산해줘"},
    {"role": "assistant", "content": "지금 할인을 몇 퍼센트 받을 수 있는지 알려주시겠어요?"},
    {"role": "user", "content": "15% 할인 받을 수 있어."},
    {"role": "tool", "tool_call_id": "random_id", "name": "calculate_discount", "content": "{\"original_price\": 57600, \"discount_percentage\": 15, \"discounted_price\": 48960.0}"}
]
call(messages, model)
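In practice, the tool result would not be hard-coded as above: the tool call returned by the model is executed locally and its output is appended as a tool message before asking the model for the final answer. A minimal sketch of that loop, reusing the client, model, tools, and conversation from the examples above; run_calculate_discount is a hypothetical local implementation, not part of the A.X 4.0 release.

import json

def run_calculate_discount(original_price, discount_percentage):
    # Hypothetical local implementation of the calculate_discount tool.
    return {
        "original_price": original_price,
        "discount_percentage": discount_percentage,
        "discounted_price": original_price * (1 - discount_percentage / 100),
    }

messages = [
    {"role": "user", "content": "우리가 뭘 사야되는데 원래 57600원인데 지금 할인 받을 수 있거든? 할인가좀 계산해줘"},
    {"role": "assistant", "content": "지금 할인을 몇 퍼센트 받을 수 있는지 알려주시겠어요?"},
    {"role": "user", "content": "15% 할인 받을 수 있어."},
]
completion = client.chat.completions.create(model=model, messages=messages, tools=tools)
assistant_message = completion.choices[0].message

if assistant_message.tool_calls:
    # Execute each requested tool call and append the results as tool messages.
    messages.append(assistant_message)
    for tool_call in assistant_message.tool_calls:
        args = json.loads(tool_call.function.arguments)
        result = run_calculate_discount(**args)
        messages.append({
            "role": "tool",
            "tool_call_id": tool_call.id,
            "name": tool_call.function.name,
            "content": json.dumps(result),
        })
    # Ask the model to turn the tool output into a final answer.
    final = client.chat.completions.create(model=model, messages=messages, tools=tools)
    print(final.choices[0].message.content)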
Citation
@article{SKTAdotX4,
  title={A.X 4.0},
  author={SKT AI Model Lab},
  year={2025},
  url={https://huggingface.co/skt/A.X-4.0}
}
Contact