Merge branch 'ggerganov:master' into t5-clean-3
commit 1c8d37a267
@@ -511,7 +511,7 @@ Building the program with BLAS support may lead to some performance improvements
 | LLAMA_CUDA_FORCE_DMMV | Boolean | false | Force the use of dequantization + matrix vector multiplication kernels instead of using kernels that do matrix vector multiplication on quantized data. By default the decision is made based on compute capability (MMVQ for 6.1/Pascal/GTX 1000 or higher). Does not affect k-quants. |
 | LLAMA_CUDA_DMMV_X | Positive integer >= 32 | 32 | Number of values in x direction processed by the CUDA dequantization + matrix vector multiplication kernel per iteration. Increasing this value can improve performance on fast GPUs. Power of 2 heavily recommended. Does not affect k-quants. |
 | LLAMA_CUDA_MMV_Y | Positive integer | 1 | Block size in y direction for the CUDA mul mat vec kernels. Increasing this value can improve performance on fast GPUs. Power of 2 recommended. |
-| LLAMA_CUDA_FORCE_MMQ | Boolean | false | Force the use of custom matrix multiplication kernels for quantized models instead of FP16 cuBLAS even if there is no int8 tensor core implementation available (affects V100, RDNA3). Speed for large batch sizes will be worse but VRAM consumption will be lower. |
+| LLAMA_CUDA_FORCE_MMQ | Boolean | false | Force the use of custom matrix multiplication kernels for quantized models instead of FP16 cuBLAS even if there is no int8 tensor core implementation available (affects V100, RDNA3). MMQ kernels are enabled by default on GPUs with int8 tensor core support. With MMQ force enabled, speed for large batch sizes will be worse but VRAM consumption will be lower. |
 | LLAMA_CUDA_FORCE_CUBLAS | Boolean | false | Force the use of FP16 cuBLAS instead of custom matrix multiplication kernels for quantized models |
 | LLAMA_CUDA_F16 | Boolean | false | If enabled, use half-precision floating point arithmetic for the CUDA dequantization + mul mat vec kernels and for the q4_1 and q5_1 matrix matrix multiplication kernels. Can improve performance on relatively recent GPUs. |
 | LLAMA_CUDA_KQUANTS_ITER | 1 or 2 | 2 | Number of values processed per iteration and per CUDA thread for Q2_K and Q6_K quantization formats. Setting this value to 1 can improve performance for slow GPUs. |
@@ -614,6 +614,75 @@ private:
         return _add_rule(name, "\"\\\"\" " + to_rule(transform()) + " \"\\\"\" space");
     }
+
+    /*
+        Returns a rule that matches a JSON string that is none of the provided strings
+
+        not_strings({"a"})
+            -> ["] ( [a] char+ | [^"a] char* )? ["] space
+        not_strings({"and", "also"})
+            -> ["] ( [a] ([l] ([s] ([o] char+ | [^"o] char*) | [^"s] char*) | [n] ([d] char+ | [^"d] char*) | [^"ln] char*) | [^"a] char* )? ["] space
+    */
+    std::string _not_strings(const std::vector<std::string> & strings) {
+        struct TrieNode {
+            std::map<char, TrieNode> children;
+            bool is_end_of_string;
+
+            TrieNode() : is_end_of_string(false) {}
+
+            void insert(const std::string & string) {
+                auto node = this;
+                for (char c : string) {
+                    node = &node->children[c];
+                }
+                node->is_end_of_string = true;
+            }
+        };
+
+        TrieNode trie;
+        for (const auto & s : strings) {
+            trie.insert(s);
+        }
+
+        std::string char_rule = _add_primitive("char", PRIMITIVE_RULES.at("char"));
+        std::ostringstream out;
+        out << "[\"] ( ";
+        std::function<void(const TrieNode &)> visit = [&](const TrieNode & node) {
+            std::ostringstream rejects;
+            auto first = true;
+            for (const auto & kv : node.children) {
+                rejects << kv.first;
+                if (first) {
+                    first = false;
+                } else {
+                    out << " | ";
+                }
+                out << "[" << kv.first << "]";
+                if (!kv.second.children.empty()) {
+                    out << " (";
+                    visit(kv.second);
+                    out << ")";
+                } else if (kv.second.is_end_of_string) {
+                    out << " " << char_rule << "+";
+                }
+            }
+            if (!node.children.empty()) {
+                if (!first) {
+                    out << " | ";
+                }
+                out << "[^\"" << rejects.str() << "] " << char_rule << "*";
+            }
+        };
+        visit(trie);
+
+        out << " )";
+        if (!trie.is_end_of_string) {
+            out << "?";
+        }
+        out << " [\"] space";
+        return out.str();
+    }
+
     std::string _resolve_ref(const std::string & ref) {
         std::string ref_name = ref.substr(ref.find_last_of('/') + 1);
         if (_rules.find(ref_name) == _rules.end() && _refs_being_resolved.find(ref) == _refs_being_resolved.end()) {
@@ -634,6 +703,7 @@ private:
         std::vector<std::string> required_props;
         std::vector<std::string> optional_props;
         std::unordered_map<std::string, std::string> prop_kv_rule_names;
+        std::vector<std::string> prop_names;
         for (const auto & kv : properties) {
             const auto &prop_name = kv.first;
             const auto &prop_schema = kv.second;
@@ -648,11 +718,18 @@ private:
             } else {
                 optional_props.push_back(prop_name);
             }
+            prop_names.push_back(prop_name);
         }
-        if (additional_properties.is_object() || (additional_properties.is_boolean() && additional_properties.get<bool>())) {
+        if (!(additional_properties.is_boolean() && !additional_properties.get<bool>())) {
             std::string sub_name = name + (name.empty() ? "" : "-") + "additional";
-            std::string value_rule = visit(additional_properties.is_object() ? additional_properties : json::object(), sub_name + "-value");
-            std::string kv_rule = _add_rule(sub_name + "-kv", _add_primitive("string", PRIMITIVE_RULES.at("string")) + " \":\" space " + value_rule);
+            std::string value_rule =
+                additional_properties.is_object() ? visit(additional_properties, sub_name + "-value")
+                : _add_primitive("value", PRIMITIVE_RULES.at("value"));
+
+            auto key_rule =
+                prop_names.empty() ? _add_primitive("string", PRIMITIVE_RULES.at("string"))
+                : _add_rule(sub_name + "-k", _not_strings(prop_names));
+
+            std::string kv_rule = _add_rule(sub_name + "-kv", key_rule + " \":\" space " + value_rule);
             prop_kv_rule_names["*"] = kv_rule;
             optional_props.push_back("*");
         }
@@ -678,15 +755,11 @@ private:
             }
             std::string k = ks[0];
             std::string kv_rule_name = prop_kv_rule_names[k];
-            if (k == "*") {
-                res = _add_rule(
-                    name + (name.empty() ? "" : "-") + "additional-kvs",
-                    kv_rule_name + " ( \",\" space " + kv_rule_name + " )*"
-                );
-            } else if (first_is_optional) {
-                res = "( \",\" space " + kv_rule_name + " )?";
+            std::string comma_ref = "( \",\" space " + kv_rule_name + " )";
+            if (first_is_optional) {
+                res = comma_ref + (k == "*" ? "*" : "?");
             } else {
-                res = kv_rule_name;
+                res = kv_rule_name + (k == "*" ? " " + comma_ref + "*" : "");
             }
             if (ks.size() > 1) {
                 res += " " + _add_rule(
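The refactor above folds the old "*" special case into one comma-separated fragment. A minimal Python mirror of the new logic (function name is illustrative only), useful for seeing the shapes it produces:

def optional_kv_fragment(kv_rule_name, k, first_is_optional):
    # one helper now covers both named optional properties ("?" repetition)
    # and the catch-all "*" entry ("*" repetition)
    comma_ref = f'( "," space {kv_rule_name} )'
    if first_is_optional:
        return comma_ref + ('*' if k == '*' else '?')
    return kv_rule_name + (' ' + comma_ref + '*' if k == '*' else '')

print(optional_kv_fragment('a-kv', 'a', True))     # ( "," space a-kv )?
print(optional_kv_fragment('add-kv', '*', False))  # add-kv ( "," space add-kv )*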
@@ -820,17 +893,19 @@ public:
         } else if (schema_type.is_array()) {
             std::vector<json> schema_types;
             for (const auto & t : schema_type) {
-                schema_types.push_back({{"type", t}});
+                json schema_copy(schema);
+                schema_copy["type"] = t;
+                schema_types.push_back(schema_copy);
             }
             return _add_rule(rule_name, _generate_union_rule(name, schema_types));
         } else if (schema.contains("const")) {
-            return _add_rule(rule_name, _generate_constant_rule(schema["const"]));
+            return _add_rule(rule_name, _generate_constant_rule(schema["const"]) + " space");
         } else if (schema.contains("enum")) {
             std::vector<std::string> enum_values;
             for (const auto & v : schema["enum"]) {
                 enum_values.push_back(_generate_constant_rule(v));
             }
-            return _add_rule(rule_name, join(enum_values.begin(), enum_values.end(), " | "));
+            return _add_rule(rule_name, "(" + join(enum_values.begin(), enum_values.end(), " | ") + ") space");
         } else if ((schema_type.is_null() || schema_type == "object")
                 && (schema.contains("properties") ||
                     (schema.contains("additionalProperties") && schema["additionalProperties"] != true))) {
@@ -3,7 +3,7 @@
 #! pip install pydantic
 #! python json-schema-pydantic-example.py

-from pydantic import BaseModel, TypeAdapter
+from pydantic import BaseModel, Extra, TypeAdapter
 from annotated_types import MinLen
 from typing import Annotated, List, Optional
 import json, requests
@@ -50,12 +50,16 @@ else:
 if __name__ == '__main__':

     class QAPair(BaseModel):
+        class Config:
+            extra = 'forbid'  # triggers additionalProperties: false in the JSON schema
         question: str
         concise_answer: str
         justification: str
         stars: Annotated[int, Field(ge=1, le=5)]

     class PyramidalSummary(BaseModel):
+        class Config:
+            extra = 'forbid'  # triggers additionalProperties: false in the JSON schema
         title: str
         summary: str
         question_answers: Annotated[List[QAPair], MinLen(2)]
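The effect of the new `Config.extra = 'forbid'` lines can be checked directly. A minimal sketch, assuming a Pydantic version where `TypeAdapter(...).json_schema()` reflects `extra = 'forbid'` as `additionalProperties: false` (the `Point` model is illustrative, not from the example file):

from pydantic import BaseModel, TypeAdapter

class Point(BaseModel):
    class Config:
        extra = 'forbid'  # forbid unknown keys
    x: int
    y: int

# The generated JSON schema should now carry "additionalProperties": false,
# which the grammar converter turns into a key rule that excludes "x" and "y".
print(TypeAdapter(Point).json_schema().get('additionalProperties'))  # expected: False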
@@ -4,8 +4,7 @@ import itertools
 import json
 import re
 import sys
-from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, List, Optional, Set, Tuple, Union


 def _build_repetition(item_rule, min_items, max_items, separator_rule=None):

@@ -276,6 +275,51 @@ class SchemaConverter:

         return ''.join(('(', *recurse(0), ')'))

+    def _not_strings(self, strings):
+        class TrieNode:
+            def __init__(self):
+                self.children = {}
+                self.is_end_of_string = False
+
+            def insert(self, string):
+                node = self
+                for c in string:
+                    node = node.children.setdefault(c, TrieNode())
+                node.is_end_of_string = True
+
+        trie = TrieNode()
+        for s in strings:
+            trie.insert(s)
+
+        char_rule = self._add_primitive('char', PRIMITIVE_RULES['char'])
+        out = ['["] ( ']
+
+        def visit(node):
+            rejects = []
+            first = True
+            for c in sorted(node.children.keys()):
+                child = node.children[c]
+                rejects.append(c)
+                if first:
+                    first = False
+                else:
+                    out.append(' | ')
+                out.append(f'[{c}]')
+                if child.children:
+                    out.append(f' (')
+                    visit(child)
+                    out.append(')')
+                elif child.is_end_of_string:
+                    out.append(f' {char_rule}+')
+            if node.children:
+                if not first:
+                    out.append(' | ')
+                out.append(f'[^"{"".join(rejects)}] {char_rule}*')
+
+        visit(trie)
+
+        out.append(f' ){"" if trie.is_end_of_string else "?"} ["] space')
+        return ''.join(out)
+
     def _add_rule(self, name, rule):
         esc_name = INVALID_RULE_CHARS_RE.sub('-', name)
         if esc_name not in self._rules or self._rules[esc_name] == rule:
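For reference, a standalone sketch of the trie-to-character-class idea that `_not_strings` implements. It hard-codes the primitive rule name `char` instead of going through the converter's `_add_primitive`, so it is illustrative only, not part of the library:

def not_strings_sketch(strings):
    # nested dicts as a trie; the '' key marks end of string
    trie = {}
    for s in strings:
        node = trie
        for c in s:
            node = node.setdefault(c, {})
        node[''] = True

    out = ['["] ( ']

    def visit(node):
        rejects = []
        first = True
        for c in sorted(k for k in node if k != ''):
            child = node[c]
            rejects.append(c)
            if first:
                first = False
            else:
                out.append(' | ')
            out.append(f'[{c}]')
            if any(k != '' for k in child):   # descend into longer prefixes
                out.append(' (')
                visit(child)
                out.append(')')
            elif child.get(''):               # a full excluded string ends here
                out.append(' char+')
        if rejects:                           # anything not continuing a prefix is fine
            if not first:
                out.append(' | ')
            out.append(f'[^"{"".join(rejects)}] char*')

    visit(trie)
    out.append(f' ){"" if trie.get("") else "?"} ["] space')
    return ''.join(out)

print(not_strings_sketch(["a"]))  # ["] ( [a] char+ | [^"a] char* )? ["] space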
@@ -521,13 +565,13 @@ class SchemaConverter:
             return self._add_rule(rule_name, self._generate_union_rule(name, schema.get('oneOf') or schema['anyOf']))

         elif isinstance(schema_type, list):
-            return self._add_rule(rule_name, self._generate_union_rule(name, [{'type': t} for t in schema_type]))
+            return self._add_rule(rule_name, self._generate_union_rule(name, [{**schema, 'type': t} for t in schema_type]))

         elif 'const' in schema:
-            return self._add_rule(rule_name, self._generate_constant_rule(schema['const']))
+            return self._add_rule(rule_name, self._generate_constant_rule(schema['const']) + ' space')

         elif 'enum' in schema:
-            rule = ' | '.join((self._generate_constant_rule(v) for v in schema['enum']))
+            rule = '(' + ' | '.join((self._generate_constant_rule(v) for v in schema['enum'])) + ') space'
             return self._add_rule(rule_name, rule)

         elif schema_type in (None, 'object') and \
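The const/enum change wraps the alternatives and appends the `space` rule. A small sketch of the resulting shape; `constant_rule` here only mimics what `_generate_constant_rule` does (the exact escaping in the converter may differ):

import json

def constant_rule(value):
    # mimic: a JSON literal, quoted and escaped for GBNF (assumption for illustration)
    return '"' + json.dumps(value).replace('\\', '\\\\').replace('"', '\\"') + '"'

def enum_rule(values):
    # after this change: parenthesized alternatives followed by the "space" rule
    return '(' + ' | '.join(constant_rule(v) for v in values) + ') space'

print(enum_rule(["red", "green"]))  # ("\"red\"" | "\"green\"") space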
@@ -632,7 +676,7 @@ class SchemaConverter:
             self._add_primitive(dep, dep_rule)
         return n

-    def _build_object_rule(self, properties: List[Tuple[str, Any]], required: Set[str], name: str, additional_properties: Union[bool, Any]):
+    def _build_object_rule(self, properties: List[Tuple[str, Any]], required: Set[str], name: str, additional_properties: Optional[Union[bool, Any]]):
         prop_order = self._prop_order
         # sort by position in prop_order (if specified) then by original order
         sorted_props = [kv[0] for _, kv in sorted(enumerate(properties), key=lambda ikv: (prop_order.get(ikv[1][0], len(prop_order)), ikv[0]))]
@@ -647,12 +691,16 @@ class SchemaConverter:
         required_props = [k for k in sorted_props if k in required]
         optional_props = [k for k in sorted_props if k not in required]

-        if additional_properties == True or isinstance(additional_properties, dict):
+        if additional_properties != False:
             sub_name = f'{name}{"-" if name else ""}additional'
-            value_rule = self.visit({} if additional_properties == True else additional_properties, f'{sub_name}-value')
+            value_rule = self.visit(additional_properties, f'{sub_name}-value') if isinstance(additional_properties, dict) else \
+                self._add_primitive('value', PRIMITIVE_RULES['value'])
+            key_rule = self._add_primitive('string', PRIMITIVE_RULES['string']) if not sorted_props \
+                else self._add_rule(f'{sub_name}-k', self._not_strings(sorted_props))
+
             prop_kv_rule_names["*"] = self._add_rule(
                 f'{sub_name}-kv',
-                self._add_primitive('string', PRIMITIVE_RULES['string']) + f' ":" space {value_rule}'
+                f'{key_rule} ":" space {value_rule}'
             )
             optional_props.append("*")

@@ -667,15 +715,11 @@ class SchemaConverter:
         def get_recursive_refs(ks, first_is_optional):
             [k, *rest] = ks
             kv_rule_name = prop_kv_rule_names[k]
-            if k == '*':
-                res = self._add_rule(
-                    f'{name}{"-" if name else ""}additional-kvs',
-                    f'{kv_rule_name} ( "," space ' + kv_rule_name + ' )*'
-                )
-            elif first_is_optional:
-                res = f'( "," space {kv_rule_name} )?'
+            comma_ref = f'( "," space {kv_rule_name} )'
+            if first_is_optional:
+                res = comma_ref + ('*' if k == '*' else '?')
             else:
-                res = kv_rule_name
+                res = kv_rule_name + (' ' + comma_ref + "*" if k == '*' else '')
             if len(rest) > 0:
                 res += ' ' + self._add_rule(
                     f'{name}{"-" if name else ""}{k}-rest',
@@ -532,6 +532,64 @@ export class SchemaConverter {
     return this._addRule(name, "\"\\\"\" " + toRule(transform()) + " \"\\\"\" space")
   }

+  _notStrings(strings) {
+    class TrieNode {
+      constructor() {
+        this.children = {};
+        this.isEndOfString = false;
+      }
+
+      insert(str) {
+        let node = this;
+        for (const c of str) {
+          node = node.children[c] = node.children[c] || new TrieNode();
+        }
+        node.isEndOfString = true;
+      }
+    }
+
+    const trie = new TrieNode();
+    for (const s of strings) {
+      trie.insert(s);
+    }
+
+    const charRuleName = this._addPrimitive('char', PRIMITIVE_RULES['char']);
+    const out = ['["] ( '];
+
+    const visit = (node) => {
+      const rejects = [];
+      let first = true;
+      for (const c of Object.keys(node.children).sort()) {
+        const child = node.children[c];
+        rejects.push(c);
+        if (first) {
+          first = false;
+        } else {
+          out.push(' | ');
+        }
+        out.push(`[${c}]`);
+        if (Object.keys(child.children).length > 0) {
+          out.push(' (');
+          visit(child);
+          out.push(')');
+        } else if (child.isEndOfString) {
+          out.push(` ${charRuleName}+`);
+        }
+      }
+      if (Object.keys(node.children).length > 0) {
+        if (!first) {
+          out.push(' | ');
+        }
+        out.push(`[^"${rejects.join('')}] ${charRuleName}*`);
+      }
+    };
+
+    visit(trie);
+
+    out.push(` )${trie.isEndOfString ? '' : '?'} ["] space`);
+    return out.join('');
+  }
+
   _resolveRef(ref) {
     let refName = ref.split('/').pop();
     if (!(refName in this._rules) && !this._refsBeingResolved.has(ref)) {
@@ -558,11 +616,11 @@ export class SchemaConverter {
     } else if (schema.oneOf || schema.anyOf) {
       return this._addRule(ruleName, this._generateUnionRule(name, schema.oneOf || schema.anyOf));
     } else if (Array.isArray(schemaType)) {
-      return this._addRule(ruleName, this._generateUnionRule(name, schemaType.map(t => ({ type: t }))));
+      return this._addRule(ruleName, this._generateUnionRule(name, schemaType.map(t => ({...schema, type: t}))));
     } else if ('const' in schema) {
-      return this._addRule(ruleName, this._generateConstantRule(schema.const));
+      return this._addRule(ruleName, this._generateConstantRule(schema.const) + ' space');
     } else if ('enum' in schema) {
-      const rule = schema.enum.map(v => this._generateConstantRule(v)).join(' | ');
+      const rule = '(' + schema.enum.map(v => this._generateConstantRule(v)).join(' | ') + ') space';
       return this._addRule(ruleName, rule);
     } else if ((schemaType === undefined || schemaType === 'object') &&
         ('properties' in schema ||
@@ -599,7 +657,7 @@ export class SchemaConverter {
         }
       }

-      return this._addRule(ruleName, this._buildObjectRule(properties, required, name, /* additionalProperties= */ false));
+      return this._addRule(ruleName, this._buildObjectRule(properties, required, name, null));
     } else if ((schemaType === undefined || schemaType === 'array') && ('items' in schema || 'prefixItems' in schema)) {
       const items = schema.items ?? schema.prefixItems;
       if (Array.isArray(items)) {
@@ -693,12 +751,19 @@ export class SchemaConverter {
     const requiredProps = sortedProps.filter(k => required.has(k));
     const optionalProps = sortedProps.filter(k => !required.has(k));

-    if (typeof additionalProperties === 'object' || additionalProperties === true) {
+    if (additionalProperties !== false) {
       const subName = `${name ?? ''}${name ? '-' : ''}additional`;
-      const valueRule = this.visit(additionalProperties === true ? {} : additionalProperties, `${subName}-value`);
+      const valueRule =
+        additionalProperties != null && typeof additionalProperties === 'object' ? this.visit(additionalProperties, `${subName}-value`)
+        : this._addPrimitive('value', PRIMITIVE_RULES['value']);
+
+      const key_rule =
+        sortedProps.length === 0 ? this._addPrimitive('string', PRIMITIVE_RULES['string'])
+        : this._addRule(`${subName}-k`, this._notStrings(sortedProps));
+
       propKvRuleNames['*'] = this._addRule(
         `${subName}-kv`,
-        `${this._addPrimitive('string', PRIMITIVE_RULES['string'])} ":" space ${valueRule}`);
+        `${key_rule} ":" space ${valueRule}`);
       optionalProps.push('*');
     }

@@ -715,15 +780,11 @@ export class SchemaConverter {
       const [k, ...rest] = ks;
       const kvRuleName = propKvRuleNames[k];
       let res;
-      if (k === '*') {
-        res = this._addRule(
-          `${name ?? ''}${name ? '-' : ''}additional-kvs`,
-          `${kvRuleName} ( "," space ` + kvRuleName + ` )*`
-        )
-      } else if (firstIsOptional) {
-        res = `( "," space ${kvRuleName} )?`;
+      const commaRef = `( "," space ${kvRuleName} )`;
+      if (firstIsOptional) {
+        res = commaRef + (k === '*' ? '*' : '?');
       } else {
-        res = kvRuleName;
+        res = kvRuleName + (k === '*' ? ' ' + commaRef + '*' : '');
       }
       if (rest.length > 0) {
         res += ' ' + this._addRule(
@@ -82,7 +82,7 @@ Feature: llama.cpp server

     Examples: Prompts
       | response_format                                                      | n_predicted | re_content  |
-      | {"type": "json_object", "schema": {"const": "42"}}                   | 5           | "42"        |
+      | {"type": "json_object", "schema": {"const": "42"}}                   | 6           | "42"        |
       | {"type": "json_object", "schema": {"items": [{"type": "integer"}]}}  | 10          | \[ -300 \]  |
       | {"type": "json_object"}                                              | 10          | \{ " Jacky. |

@@ -23,7 +23,7 @@ struct mma_int_A_I16K4 {

     __device__ __forceinline__ void load(const int * __restrict__ xs0, const int & stride) {
 #if defined(INT8_MMA_AVAILABLE)
-        const int * xs = xs0 + (threadIdx.x%I)*stride + (threadIdx.x/I)*(K/2);
+        const int * xs = xs0 + (threadIdx.x%I)*stride;
         asm("ldmatrix.sync.aligned.m8n8.x2.b16 {%0, %1}, [%2];"
             : "+r"(x[0]), "+r"(x[1])
             : "l"(xs));
llama.cpp
@@ -2398,13 +2398,21 @@ struct llama_control_vector {
     int32_t layer_start = -1;
     int32_t layer_end   = -1;

-    ggml_tensor * tensor_for(int il) const {
+    struct ggml_tensor * tensor_for(int il) const {
         if (il < 0 || il < layer_start || il > layer_end || (size_t) il >= tensors.size()) {
             return nullptr;
         }
         return tensors[il];
     }

+    struct ggml_tensor * apply_to(struct ggml_context * ctx, struct ggml_tensor * cur, int il) const {
+        ggml_tensor * layer_dir = tensor_for(il);
+        if (layer_dir != nullptr) {
+            cur = ggml_add(ctx, cur, layer_dir);
+        }
+        return cur;
+    }
+
     ~llama_control_vector() {
         for (struct ggml_context * ctx : ctxs) {
             ggml_free(ctx);
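In plain terms, the new apply_to helper centralizes the "add the layer's control-vector direction if one is loaded" step that was previously repeated at every call site. A NumPy stand-in for the ggml calls, names illustrative:

def apply_control_vector(cur, tensors, il, layer_start, layer_end):
    # mirror of tensor_for: out-of-range layers have no direction tensor
    in_range = il >= 0 and layer_start <= il <= layer_end and il < len(tensors)
    layer_dir = tensors[il] if in_range else None
    # mirror of apply_to: add the direction to the hidden state when present
    return cur + layer_dir if layer_dir is not None else cur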
@@ -7339,10 +7347,13 @@ static struct ggml_tensor * llm_build_ffn(
         struct ggml_tensor * cur,
         struct ggml_tensor * up,
         struct ggml_tensor * up_b,
+        struct ggml_tensor * up_s,
         struct ggml_tensor * gate,
         struct ggml_tensor * gate_b,
+        struct ggml_tensor * gate_s,
         struct ggml_tensor * down,
         struct ggml_tensor * down_b,
+        struct ggml_tensor * down_s,
         struct ggml_tensor * act_scales,
         llm_ffn_op_type type_op,
         llm_ffn_gate_type type_gate,
@@ -7356,6 +7367,11 @@ static struct ggml_tensor * llm_build_ffn(
         cb(tmp, "ffn_up_b", il);
     }

+    if (up_s) {
+        tmp = ggml_mul(ctx, tmp, up_s);
+        cb(tmp, "ffn_up_s", il);
+    }
+
     if (gate) {
         switch (type_gate) {
             case LLM_FFN_SEQ:
@@ -7374,6 +7390,12 @@ static struct ggml_tensor * llm_build_ffn(
             cur = ggml_add(ctx, cur, gate_b);
             cb(cur, "ffn_gate_b", il);
         }
+
+        if (gate_s) {
+            cur = ggml_mul(ctx, cur, gate_s);
+            cb(cur, "ffn_gate_s", il);
+        }
+
     } else {
         cur = tmp;
     }
@@ -7413,7 +7435,10 @@ static struct ggml_tensor * llm_build_ffn(
         cb(cur, "ffn_gate_par", il);
     }

-    cur = ggml_mul_mat(ctx, down, cur);
+    if (down) {
+        cur = ggml_mul_mat(ctx, down, cur);
+    }
+
     if (down_b) {
         cb(cur, "ffn_down", il);
     }
@@ -7422,6 +7447,11 @@ static struct ggml_tensor * llm_build_ffn(
         cur = ggml_add(ctx, cur, down_b);
     }

+    if (down_s) {
+        cur = ggml_mul(ctx, cur, down_s);
+        cb(cur, "ffn_down_s", il);
+    }
+
     return cur;
 }
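To see where the three new scale tensors (up_s, gate_s, down_s) enter relative to the existing biases, here is a rough NumPy sketch of the parallel-gate (LLM_FFN_PAR, SILU) path; names and signature are placeholders for illustration, not the ggml API:

import numpy as np

def silu(x):
    return x / (1.0 + np.exp(-x))

def ffn_par_sketch(x, up, up_b=None, up_s=None,
                   gate=None, gate_b=None, gate_s=None,
                   down=None, down_b=None, down_s=None):
    tmp = x @ up
    if up_b is not None:   tmp = tmp + up_b
    if up_s is not None:   tmp = tmp * up_s        # new: "ffn_up_s"
    if gate is not None:
        cur = x @ gate
        if gate_b is not None: cur = cur + gate_b
        if gate_s is not None: cur = cur * gate_s  # new: "ffn_gate_s"
        cur = silu(cur) * tmp                      # parallel gating
    else:
        cur = silu(tmp)
    if down is not None:   cur = cur @ down        # down projection is now optional
    if down_b is not None: cur = cur + down_b
    if down_s is not None: cur = cur * down_s      # new: "ffn_down_s"
    return cur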
@@ -8184,9 +8214,9 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    model.layers[il].ffn_gate, model.layers[il].ffn_gate_b,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    model.layers[il].ffn_gate, model.layers[il].ffn_gate_b, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
@@ -8212,10 +8242,7 @@ struct llm_build_context {
         cur = ggml_add(ctx0, cur, ffn_inp);
         cb(cur, "ffn_out", il);

-        ggml_tensor * layer_dir = lctx.cvec.tensor_for(il);
-        if (layer_dir != nullptr) {
-            cur = ggml_add(ctx0, cur, layer_dir);
-        }
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -8321,15 +8348,16 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -8425,15 +8453,16 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -8540,18 +8569,17 @@ struct llm_build_context {
         // feed forward
         {
             cur = llm_build_ffn(ctx0, attn_norm, // !! use the attn norm, not the result
-                    model.layers[il].ffn_up, NULL,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
-        cb(cur, "l_out", il);

         cur = ggml_add(ctx0, cur, inpL);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -8703,10 +8731,7 @@ struct llm_build_context {
         cur = ggml_add(ctx0, cur, ffn_inp);
         cb(cur, "ffn_out", il);

-        ggml_tensor * layer_dir = lctx.cvec.tensor_for(il);
-        if (layer_dir != nullptr) {
-            cur = ggml_add(ctx0, cur, layer_dir);
-        }
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -8837,10 +8862,7 @@ struct llm_build_context {
         cur = ggml_add(ctx0, cur, ffn_inp);
         cb(cur, "ffn_out", il);

-        ggml_tensor * layer_dir = lctx.cvec.tensor_for(il);
-        if (layer_dir != nullptr) {
-            cur = ggml_add(ctx0, cur, layer_dir);
-        }
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -8938,16 +8960,20 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(cur, "ffn_out", il);
         }

-        inpL = ggml_add(ctx0, cur, ffn_inp);
-        cb(inpL, "l_out", il);
+        cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
+        cb(cur, "l_out", il);
+
+        // input for next layer
+        inpL = cur;
     }

     cur = llm_build_norm(ctx0, inpL, hparams,
@@ -9026,15 +9052,16 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -9210,23 +9237,23 @@ struct llm_build_context {
         // feed-forward network
         if (model.arch == LLM_ARCH_BERT) {
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
         } else if (model.arch == LLM_ARCH_JINA_BERT_V2) {
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_PAR, cb, il);
         } else {
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
         }
@@ -9322,16 +9349,20 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(cur, "ffn_out", il);
         }

-        inpL = ggml_add(ctx0, cur, ffn_inp);
-        cb(inpL, "l_out", il);
+        cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
+        cb(cur, "l_out", il);
+
+        // input for next layer
+        inpL = cur;
     }

     cur = llm_build_norm(ctx0, inpL, hparams,
@@ -9456,15 +9487,16 @@ struct llm_build_context {
                 LLM_NORM, cb, il);
             cb(cur, "ffn_norm", il);
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     model.layers[il].ffn_act,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -9604,15 +9636,16 @@ struct llm_build_context {
                 cur = inpSA;
             }
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -9716,15 +9749,16 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -9828,14 +9862,15 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -9964,9 +9999,9 @@ struct llm_build_context {
             cb(cur_gate, "ffn_shexp_gate", il);

             ggml_tensor * cur_ffn = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up_shexp, NULL,
-                    model.layers[il].ffn_gate_shexp, NULL,
-                    model.layers[il].ffn_down_shexp, NULL,
+                    model.layers[il].ffn_up_shexp, NULL, NULL,
+                    model.layers[il].ffn_gate_shexp, NULL, NULL,
+                    model.layers[il].ffn_down_shexp, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur_ffn, "ffn_shexp", il);
@@ -9981,6 +10016,7 @@ struct llm_build_context {
         }

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -10092,20 +10128,20 @@ struct llm_build_context {
         // FF
         {
             ffn_output = llm_build_ffn(ctx0, attn_norm_output,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(ffn_output, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, ffn_output);
-        cb(cur, "l_out", il);

         cur = ggml_add(ctx0, cur, inpL);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

+        // input for next layer
         inpL = cur;
     }

@@ -10237,8 +10273,10 @@ struct llm_build_context {
         }

         cur = ggml_add(ctx0, residual, cur);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

+        // input for next layer
         inpL = cur;
     }

@@ -10328,18 +10366,17 @@ struct llm_build_context {
         // feed-forward network
         {
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, sa_out);
-        cb(cur, "l_out", il);

         cur = ggml_add(ctx0, cur, inpL);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -10437,16 +10474,20 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(cur, "ffn_out", il);
         }

-        inpL = ggml_add(ctx0, cur, ffn_inp);
-        cb(inpL, "l_out", il);
+        cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
+        cb(cur, "l_out", il);
+
+        // input for next layer
+        inpL = cur;
     }

     cur = llm_build_norm(ctx0, inpL, hparams,
@@ -10544,16 +10585,20 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(cur, "ffn_out", il);
         }

-        inpL = ggml_add(ctx0, cur, ffn_inp);
-        cb(inpL, "l_out", il);
+        cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
+        cb(cur, "l_out", il);
+
+        // input for next layer
+        inpL = cur;
     }

     cur = llm_build_norm(ctx0, inpL, hparams,
@@ -10657,14 +10702,15 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -10774,14 +10820,15 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -10910,9 +10957,9 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
@@ -10923,6 +10970,7 @@ struct llm_build_context {
             cb(cur, "hidden_scaled_ffn", -1);

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -11026,15 +11074,16 @@ struct llm_build_context {
         // feed-forward network
         {
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }

         cur = ggml_add(ctx0, cur, sa_out);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -11145,13 +11194,15 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);

             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                    NULL, NULL,
-                    model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                    model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                    NULL, NULL, NULL,
+                    model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                     NULL,
                     LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(cur, "ffn_out", il);

         cur = ggml_add(ctx0, cur, ffn_inp);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -11300,6 +11351,7 @@ struct llm_build_context {

         // residual
         cur = ggml_add(ctx0, cur, inpL);
+        cur = lctx.cvec.apply_to(ctx0, cur, il);
         cb(cur, "l_out", il);

         // input for next layer
@@ -11430,9 +11482,9 @@ struct llm_build_context {
         // feed-forward network
         {
             cur = llm_build_ffn(ctx0, ffn_inp,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
|
|||||||
// add together residual + FFN + self-attention
|
// add together residual + FFN + self-attention
|
||||||
cur = ggml_add(ctx0, cur, inpL);
|
cur = ggml_add(ctx0, cur, inpL);
|
||||||
cur = ggml_add(ctx0, cur, attn_out);
|
cur = ggml_add(ctx0, cur, attn_out);
|
||||||
|
cur = lctx.cvec.apply_to(ctx0, cur, il);
|
||||||
cb(cur, "l_out", il);
|
cb(cur, "l_out", il);
|
||||||
|
|
||||||
// input for next layer
|
// input for next layer
|
||||||
@ -11566,9 +11619,9 @@ struct llm_build_context {
|
|||||||
cb(cur, "ffn_norm", il);
|
cb(cur, "ffn_norm", il);
|
||||||
|
|
||||||
cur = llm_build_ffn(ctx0, cur,
|
cur = llm_build_ffn(ctx0, cur,
|
||||||
model.layers[il].ffn_up, NULL,
|
model.layers[il].ffn_up, NULL, NULL,
|
||||||
model.layers[il].ffn_gate, NULL,
|
model.layers[il].ffn_gate, NULL, NULL,
|
||||||
model.layers[il].ffn_down, NULL,
|
model.layers[il].ffn_down, NULL, NULL,
|
||||||
NULL,
|
NULL,
|
||||||
LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
|
LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
|
||||||
cb(cur, "ffn_out", il);
|
cb(cur, "ffn_out", il);
|
||||||
@@ -11576,10 +11629,7 @@ struct llm_build_context {
             cur = ggml_add(ctx0, cur, ffn_inp);
             cb(cur, "ffn_out", il);
 
-            ggml_tensor * layer_dir = lctx.cvec.tensor_for(il);
-            if (layer_dir != nullptr) {
-                cur = ggml_add(ctx0, cur, layer_dir);
-            }
+            cur = lctx.cvec.apply_to(ctx0, cur, il);
             cb(cur, "l_out", il);
 
             // input for next layer
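Every backend touched by these hunks now routes the per-layer control vector through a single lctx.cvec.apply_to(ctx0, cur, il) call; the lines removed above show exactly what that call replaces, namely fetching the layer's direction tensor and adding it only when it exists. A hedged sketch of such a helper, built strictly from the removed code (the struct and member names here are stand-ins, not the real llama.cpp definitions):

    // Sketch only: wraps the removed "tensor_for + null check + ggml_add" pattern.
    #include "ggml.h"

    #include <vector>

    struct control_vector_sketch {
        std::vector<ggml_tensor *> tensors; // hypothetical per-layer directions, nullptr when absent

        ggml_tensor * tensor_for(int il) const {
            if (il < 0 || il >= (int) tensors.size()) {
                return nullptr;
            }
            return tensors[il];
        }

        // Equivalent of the code removed in the hunk above.
        ggml_tensor * apply_to(ggml_context * ctx, ggml_tensor * cur, int il) const {
            ggml_tensor * layer_dir = tensor_for(il);
            if (layer_dir != nullptr) {
                cur = ggml_add(ctx, cur, layer_dir);
            }
            return cur;
        }
    };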
@@ -11683,9 +11733,9 @@ struct llm_build_context {
                 cb(cur, "ffn_norm", il);
 
                 cur = llm_build_ffn(ctx0, cur,
-                        model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                        NULL, NULL,
-                        model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                        model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                        NULL, NULL, NULL,
+                        model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                         NULL,
                         LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
                 cb(cur, "ffn_out", il);
@@ -11693,8 +11743,12 @@ struct llm_build_context {
                 cur = ggml_add(ctx0, cur, inpL);
                 cb(cur, "ffn_out", il);
 
-                inpL = ggml_add(ctx0, cur, attn_out);
-                cb(inpL, "l_out", il);
+                cur = ggml_add(ctx0, cur, attn_out);
+                cur = lctx.cvec.apply_to(ctx0, cur, il);
+                cb(cur, "l_out", il);
+
+                // input for next layer
+                inpL = cur;
             } else {
                 // attention and ffn are computed sequentially
                 // x = x + attn(ln1(x))
@@ -11710,15 +11764,19 @@ struct llm_build_context {
                 cb(cur, "ffn_norm", il);
 
                 cur = llm_build_ffn(ctx0, cur,
-                        model.layers[il].ffn_up, model.layers[il].ffn_up_b,
-                        NULL, NULL,
-                        model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                        model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL,
+                        NULL, NULL, NULL,
+                        model.layers[il].ffn_down, model.layers[il].ffn_down_b, NULL,
                         NULL,
                         LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
                 cb(cur, "ffn_out", il);
 
-                inpL = ggml_add(ctx0, cur, ffn_inp);
-                cb(inpL, "l_out", il);
+                cur = ggml_add(ctx0, cur, ffn_inp);
+                cur = lctx.cvec.apply_to(ctx0, cur, il);
+                cb(cur, "l_out", il);
+
+                // input for next layer
+                inpL = cur;
             }
         }
 
@@ -11815,9 +11873,9 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);
 
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
@@ -11845,10 +11903,7 @@ struct llm_build_context {
             cur = ggml_add(ctx0, cur, ffn_out);
             cb(cur, "ffn_out", il);
 
-            ggml_tensor * layer_dir = lctx.cvec.tensor_for(il);
-            if (layer_dir != nullptr) {
-                cur = ggml_add(ctx0, cur, layer_dir);
-            }
+            cur = lctx.cvec.apply_to(ctx0, cur, il);
             cb(cur, "l_out", il);
 
             // input for next layer
@@ -12040,9 +12095,9 @@ struct llm_build_context {
             cb(cur, "ffn_norm", il);
 
             cur = llm_build_ffn(ctx0, cur,
-                    model.layers[il].ffn_up, NULL,
-                    model.layers[il].ffn_gate, NULL,
-                    model.layers[il].ffn_down, NULL,
+                    model.layers[il].ffn_up, NULL, NULL,
+                    model.layers[il].ffn_gate, NULL, NULL,
+                    model.layers[il].ffn_down, NULL, NULL,
                     NULL,
                     LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
@@ -12068,9 +12123,9 @@ struct llm_build_context {
             // FFN shared expert
             {
                 ggml_tensor * ffn_shexp = llm_build_ffn(ctx0, cur,
-                        model.layers[il].ffn_up_shexp, NULL,
-                        model.layers[il].ffn_gate_shexp, NULL,
-                        model.layers[il].ffn_down_shexp, NULL,
+                        model.layers[il].ffn_up_shexp, NULL, NULL,
+                        model.layers[il].ffn_gate_shexp, NULL, NULL,
+                        model.layers[il].ffn_down_shexp, NULL, NULL,
                         NULL,
                         LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
                 cb(ffn_shexp, "ffn_shexp", il);
@@ -12081,6 +12136,7 @@ struct llm_build_context {
             }
 
             cur = ggml_add(ctx0, cur, ffn_inp);
+            cur = lctx.cvec.apply_to(ctx0, cur, il);
             cb(cur, "l_out", il);
 
             // input for next layer
@@ -12172,7 +12228,7 @@ struct llm_build_context {
             cb(Kcur, "Kcur", il);
 
             cur = llm_build_kv(ctx0, model, hparams, cparams, kv_self, gf,
-                    nullptr, nullptr,
+                    NULL, NULL,
                     Kcur, Vcur, Qcur, KQ_mask, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il);
 
             cur = llm_build_norm(ctx0, cur, hparams,
@@ -12199,35 +12255,28 @@ struct llm_build_context {
             cb(ffn_inp, "ffn_inp", il);
 
             // feed-forward forward
-            if (model.layers[il].ffn_gate_inp == nullptr) {
-                cur = llm_build_norm(ctx0, ffn_inp, hparams,
-                        model.layers[il].ffn_norm, NULL,
-                        LLM_NORM_RMS, cb, il);
-                cb(cur, "ffn_norm", il);
-
-                struct ggml_tensor *tmp = ggml_mul_mat(ctx0, model.layers[il].ffn_up, cur);
-                tmp = ggml_mul(ctx0, tmp, model.layers[il].ffn_up_scale);
-                cb(tmp, "ffn_up", il);
-
-                cur = ggml_mul_mat(ctx0, model.layers[il].ffn_gate, cur);
-                cur = ggml_mul(ctx0, cur, model.layers[il].ffn_gate_scale);
-                cb(cur, "ffn_gate", il);
-
-                cur = ggml_silu(ctx0, cur);
-                cb(cur, "ffn_silu", il);
-
-                cur = ggml_mul(ctx0, cur, tmp);
-                cb(cur, "ffn_gate_par", il);
-
-                cur = llm_build_norm(ctx0, cur, hparams,
-                        model.layers[il].ffn_sub_norm, NULL,
-                        LLM_NORM_RMS, cb, il);
-                cb(cur, "ffn_sub_norm", il);
-
-                cur = ggml_mul_mat(ctx0, model.layers[il].ffn_down, cur);
-                cur = ggml_mul(ctx0, cur, model.layers[il].ffn_down_scale);
-                cb(cur, "ffn_down", il);
-            }
+            cur = llm_build_norm(ctx0, ffn_inp, hparams,
+                    model.layers[il].ffn_norm, NULL,
+                    LLM_NORM_RMS, cb, il);
+            cb(cur, "ffn_norm", il);
+
+            cur = llm_build_ffn(ctx0, cur,
+                    model.layers[il].ffn_up, NULL, model.layers[il].ffn_up_scale,
+                    model.layers[il].ffn_gate, NULL, model.layers[il].ffn_gate_scale,
+                    NULL, NULL, NULL,
+                    NULL,
+                    LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
+            cb(cur, "ffn_sub_out", il);
+
+            cur = llm_build_norm(ctx0, cur, hparams,
+                    model.layers[il].ffn_sub_norm, NULL,
+                    LLM_NORM_RMS, cb, il);
+            cb(cur, "ffn_sub_norm", il);
+
+            cur = ggml_mul_mat(ctx0, model.layers[il].ffn_down, cur);
+            cur = ggml_mul(ctx0, cur, model.layers[il].ffn_down_scale);
+            cb(cur, "ffn_down", il);
+
             cur = ggml_add(ctx0, cur, ffn_inp);
             cb(cur, "l_out", il);
 
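In the hunk above, the hand-written BitNet-style FFN (a matrix multiply followed by an elementwise multiply with ffn_up_scale / ffn_gate_scale) is folded into a single llm_build_ffn call by passing those scale tensors in the new third slot of each triple; only the down projection, which needs the sub-norm in between, stays spelled out. Judging purely from the removed lines, a non-NULL scale argument appears to amount to the following pattern (a sketch under that assumption, not the actual llm_build_ffn implementation):

    // Sketch only: what a (weight, bias = NULL, scale) triple presumably expands to,
    // based on the ggml_mul_mat + ggml_mul lines removed in the hunk above.
    #include "ggml.h"

    static ggml_tensor * mul_mat_scaled(
            ggml_context * ctx,
            ggml_tensor  * w,        // projection weight
            ggml_tensor  * x,        // input activations
            ggml_tensor  * w_scale)  // optional per-tensor scale, may be NULL
    {
        ggml_tensor * y = ggml_mul_mat(ctx, w, x);
        if (w_scale != nullptr) {
            y = ggml_mul(ctx, y, w_scale); // mirrors the removed ggml_mul(..., ffn_up_scale) lines
        }
        return y;
    }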
@@ -15,8 +15,6 @@
 
 using json = nlohmann::ordered_json;
 
-//#define INCLUDE_FAILING_TESTS 1
-
 static llama_grammar* build_grammar(const std::string & grammar_str) {
     auto parsed_grammar = grammar_parser::parse(grammar_str.c_str());
 
@@ -754,7 +752,7 @@ static void test_json_schema() {
         )""",
         // Passing strings
         {
-            "{}",
+            R"""({})""",
             R"""({"foo": "bar"})""",
         },
         // Failing strings
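The bulk of the test-file changes below convert escaped string literals such as "\"foo\"" and "{}" into C++ raw string literals of the form R"""(...)""", which these tests already use for the larger JSON bodies; the bytes are identical, only the escaping disappears. A self-contained illustration:

    // Both spellings denote the same string; the raw literal just drops the escapes.
    #include <cassert>
    #include <string>

    int main() {
        const std::string escaped = "\"foo\": \"bar\"";
        const std::string raw     = R"""("foo": "bar")""";
        assert(escaped == raw);
        return 0;
    }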
@@ -762,7 +760,7 @@ static void test_json_schema() {
             "",
             "[]",
             "null",
-            "\"\"",
+            R"""("")""",
             "true",
         }
     );
@@ -770,16 +768,14 @@ static void test_json_schema() {
 
     test_schema(
         "exotic formats (list)",
         // Schema
-        R"""(
-            {
+        R"""({
             "items": [
                 { "format": "date" },
                 { "format": "uuid" },
                 { "format": "time" },
                 { "format": "date-time" }
             ]
-            }
-        )""",
+        })""",
         // Passing strings
         {
             // "{}", // NOTE: This string passes for this schema on https://www.jsonschemavalidator.net/ -- should it?
@@ -798,125 +794,113 @@ static void test_json_schema() {
     test_schema(
         "string",
         // Schema
-        R"""(
-            {
-                "type": "string"
-            }
-        )""",
+        R"""({
+            "type": "string"
+        })""",
         // Passing strings
         {
-            "\"foo\"",
-            "\"bar\"",
-            "\"\"",
+            R"""("foo")""",
+            R"""("bar")""",
+            R"""("")""",
         },
         // Failing strings
         {
-            "{}",
-            "\"foo\": \"bar\"",
+            R"""({})""",
+            R"""("foo": "bar")""",
         }
     );
 
     test_schema(
         "string w/ min length 1",
         // Schema
-        R"""(
-            {
-                "type": "string",
-                "minLength": 1
-            }
-        )""",
+        R"""({
+            "type": "string",
+            "minLength": 1
+        })""",
         // Passing strings
         {
-            "\"foo\"",
-            "\"bar\"",
+            R"""("foo")""",
+            R"""("bar")""",
         },
         // Failing strings
         {
-            "\"\"",
-            "{}",
-            "\"foo\": \"bar\"",
+            R"""("")""",
+            R"""({})""",
+            R"""("foo": "bar")""",
         }
     );
 
     test_schema(
         "string w/ min length 3",
         // Schema
-        R"""(
-            {
+        R"""({
             "type": "string",
             "minLength": 3
-            }
-        )""",
+        })""",
         // Passing strings
         {
-            "\"foo\"",
-            "\"bar\"",
-            "\"foobar\"",
+            R"""("foo")""",
+            R"""("bar")""",
+            R"""("foobar")""",
         },
         // Failing strings
         {
-            "\"\"",
-            "\"f\"",
-            "\"fo\"",
+            R"""("")""",
+            R"""("f")""",
+            R"""("fo")""",
         }
     );
 
     test_schema(
         "string w/ max length",
         // Schema
-        R"""(
-            {
-                "type": "string",
-                "maxLength": 3
-            }
-        )""",
+        R"""({
+            "type": "string",
+            "maxLength": 3
+        })""",
         // Passing strings
         {
-            "\"foo\"",
-            "\"bar\"",
-            "\"\"",
-            "\"f\"",
-            "\"fo\"",
+            R"""("foo")""",
+            R"""("bar")""",
+            R"""("")""",
+            R"""("f")""",
+            R"""("fo")""",
        },
         // Failing strings
         {
-            "\"foobar\"",
+            R"""("foobar")""",
         }
     );
 
     test_schema(
         "string w/ min & max length",
         // Schema
-        R"""(
-            {
-                "type": "string",
-                "minLength": 1,
-                "maxLength": 4
-            }
-        )""",
+        R"""({
+            "type": "string",
+            "minLength": 1,
+            "maxLength": 4
+        })""",
         // Passing strings
         {
-            "\"foo\"",
-            "\"bar\"",
-            "\"f\"",
-            "\"barf\"",
+            R"""("foo")""",
+            R"""("bar")""",
+            R"""("f")""",
+            R"""("barf")""",
         },
         // Failing strings
         {
-            "\"\"",
-            "\"barfo\"",
-            "\"foobar\"",
+            R"""("")""",
+            R"""("barfo")""",
+            R"""("foobar")""",
         }
     );
 
     test_schema(
         "boolean",
         // Schema
-        R"""(
-            {
-                "type": "boolean"
-            }
-        )""",
+        R"""({
+            "type": "boolean"
+        })""",
         // Passing strings
         {
             "true",
@@ -924,122 +908,137 @@ static void test_json_schema() {
         },
         // Failing strings
         {
-            "\"\"",
-            "\"true\"",
-            "True",
-            "FALSE",
+            R"""("")""",
+            R"""("true")""",
+            R"""(True)""",
+            R"""(FALSE)""",
         }
     );
 
     test_schema(
         "integer",
         // Schema
-        R"""(
-            {
-                "type": "integer"
-            }
-        )""",
+        R"""({
+            "type": "integer"
+        })""",
         // Passing strings
         {
-            "0",
-            "12345",
-            "1234567890123456"
+            R"""(0)""",
+            R"""(12345)""",
+            R"""(1234567890123456)""",
         },
         // Failing strings
         {
-            "",
-            "01",
-            "007",
-            "12345678901234567"
+            R"""()""",
+            R"""(01)""",
+            R"""(007)""",
+            R"""(12345678901234567 )""",
         }
     );
 
     test_schema(
         "string const",
         // Schema
-        R"""(
-            {
-                "const": "foo"
-            }
-        )""",
+        R"""({
+            "const": "foo"
+        })""",
         // Passing strings
         {
-            "\"foo\"",
+            R"""("foo")""",
         },
         // Failing strings
         {
-            "foo",
-            "\"bar\"",
+            R"""(foo)""",
+            R"""("bar")""",
         }
     );
 
     test_schema(
         "non-string const",
         // Schema
-        R"""(
-            {
-                "const": true
-            }
-        )""",
+        R"""({
+            "const": true
+        })""",
         // Passing strings
         {
-            "true",
+            R"""(true)""",
         },
         // Failing strings
         {
-            "",
-            "foo",
-            "\"true\"",
+            R"""()""",
+            R"""(foo)""",
+            R"""("true")""",
         }
     );
 
     test_schema(
         "non-string const",
         // Schema
+        R"""({
+            "enum": ["red", "amber", "green", null, 42, ["foo"]]
+        })""",
+        // Passing strings
+        {
+            R"""("red")""",
+            R"""(null)""",
+            R"""(42)""",
+            R"""(["foo"])""",
+        },
+        // Failing strings
+        {
+            R"""()""",
+            R"""(420)""",
+            R"""(true)""",
+            R"""(foo)""",
+        }
+    );
+
+    test_schema(
+        "",
+        // Schema
         R"""(
             {
-                "enum": ["red", "amber", "green", null, 42, ["foo"]]
+                "type": ["array", "null"],
+                "items": { "type": "string" }
             }
         )""",
         // Passing strings
         {
-            "\"red\"",
             "null",
-            "42",
-            "[\"foo\"]",
+            "[]",
+            "[\"123\"]",
+            "[\"foo\", \"bar\"]",
         },
         // Failing strings
         {
             "",
-            "420",
-            "true",
-            "foo",
+            "[123]",
+            "\"foo\"",
+            "[\"foo\", 42]",
         }
     );
 
     test_schema(
         "min+max items",
         // Schema
-        R"""(
-            {
-                "items": {
-                    "type": ["number", "integer"]
-                },
-                "minItems": 3,
-                "maxItems": 5
-            }
-        )""",
+        R"""({
+            "items": {
+                "type": ["number", "integer"]
+            },
+            "minItems": 3,
+            "maxItems": 5
+        })""",
         // Passing strings
         {
-            "[1, 2, 3]",
-            "[1, 2, 3, 4]",
-            "[1, 2, 3, 4, 5]",
+            R"""([1, 2, 3])""",
+            R"""([1, 2, 3, 4])""",
+            R"""([1, 2, 3, 4, 5])""",
         },
         // Failing strings
         {
-            "[1, 2]",
-            "[1, 2, 3, 4, 5, 6]",
-            "1"
+            R"""([1, 2])""",
+            R"""([1, 2, 3, 4, 5, 6])""",
+            R"""(1)""",
         }
     );
 
@@ -1047,16 +1046,14 @@ static void test_json_schema() {
     test_schema(
         "object properties",
         // Schema
-        R"""(
-            {
+        R"""({
             "type": "object",
             "properties": {
                 "number": { "type": "number" },
                 "street_name": { "type": "string" },
                 "street_type": { "enum": ["Street", "Avenue", "Boulevard"] }
             }
-            }
-        )""",
+        })""",
         // Passing strings
         {
             R"""({ "number": 1600, "street_name": "Pennsylvania", "street_type":"Avenue"})""",
@@ -1066,12 +1063,8 @@ static void test_json_schema() {
             // "By extension, even an empty object is valid"
             R"""({})""",
             // "By default, providing additional properties is valid"
-#ifdef INCLUDE_FAILING_TESTS
-            // TODO: The following should pass, but currently FAILS. Additional properties should be permitted by default.
             R"""({ "number": 1600, "street_name": "Pennsylvania", "street_type":"Avenue", "direction":"NW"})""",
-            // TODO: Spaces should be permitted around enum values, but currently they fail to pass.
             R"""({ "number": 1600, "street_name": "Pennsylvania", "street_type": "Avenue" })""",
-#endif
         },
         // Failing strings
         {
@@ -1084,13 +1077,35 @@ static void test_json_schema() {
         }
     );
 
+    test_schema(
+        "additional properties can't override other properties",
+        R"""({
+            "properties": {
+                "a": {"type": "integer"},
+                "b": {"type": "integer"}
+            },
+            "additionalProperties": true
+        })""",
+        // Passing strings
+        {
+            R"""({"a": 42})""",
+            R"""({"c": ""})""",
+            R"""({"a": 42, "c": ""})""",
+            R"""({"a_": ""})""",
+        },
+        // Failing strings
+        {
+            R"""()""",
+            R"""({"a": ""})""",
+            R"""({"a": "", "b": ""})""",
+        }
+    );
+
     // Properties (from: https://json-schema.org/understanding-json-schema/reference/object#properties)
     test_schema(
         "object properties, additionalProperties: true",
         // Schema
-        R"""(
-            {
+        R"""({
             "type": "object",
             "properties": {
                 "number": { "type": "number" },
@@ -1098,26 +1113,18 @@ static void test_json_schema() {
                 "street_type": { "enum": ["Street", "Avenue", "Boulevard"] }
             },
             "additionalProperties": true
-            }
-        )""",
+        })""",
         // Passing strings
         {
             // "By extension, even an empty object is valid"
             R"""({})""",
-#ifdef INCLUDE_FAILING_TESTS
-            // TODO: Following line should pass and doesn't
             R"""({"number":1600,"street_name":"Pennsylvania","street_type":"Avenue"})""",
             // "By default, leaving out properties is valid"
-            // TODO: Following line should pass and doesn't
             R"""({ "street_name": "Pennsylvania" })""",
-            // TODO: Following line should pass and doesn't
             R"""({ "number": 1600, "street_name": "Pennsylvania" })""",
             // "By default, providing additional properties is valid"
-            // TODO: The following should pass, but currently FAILS. Additional properties should be permitted by default.
             R"""({ "number": 1600, "street_name": "Pennsylvania", "street_type":"Avenue", "direction":"NW"})""",
-            // TODO: Spaces should be permitted around enum values, but currently they fail to pass.
             R"""({ "number": 1600, "street_name": "Pennsylvania", "street_type": "Avenue" })""",
-#endif
         },
         // Failing strings
         {
@@ -1132,8 +1139,7 @@ static void test_json_schema() {
     test_schema(
         "required + optional props each in original order",
         // Schema
-        R"""(
-            {
+        R"""({
             "type": "object",
             "properties": {
                 "number": { "type": "number" },
@@ -1141,18 +1147,15 @@ static void test_json_schema() {
                 "street_type": { "enum": ["Street", "Avenue", "Boulevard"] }
             },
             "additionalProperties": false
-            }
-        )""",
+        })""",
         // Passing strings
         {
             R"""({ "street_name": "Pennsylvania" })""",
             R"""({ "number": 1600, "street_type":"Avenue"})""",
             R"""({ "number": 1600, "street_name": "Pennsylvania" })""",
             R"""({ "number": 1600, "street_name": "Pennsylvania", "street_type":"Avenue"})""",
-#ifdef INCLUDE_FAILING_TESTS
-            // TODO: Spaces should be permitted around enum values, but currently they fail to pass.
+            // Spaces are permitted around enum values
             R"""({ "number": 1600, "street_name": "Pennsylvania", "street_type": "Avenue" })""",
-#endif
         },
         // Failing strings
         {
@@ -1166,18 +1169,16 @@ static void test_json_schema() {
     test_schema(
         "required + optional props each in original order",
         // Schema
-        R"""(
-            {
-                "properties": {
-                    "b": {"type": "string"},
-                    "a": {"type": "string"},
-                    "d": {"type": "string"},
-                    "c": {"type": "string"}
-                },
-                "required": ["a", "b"],
-                "additionalProperties": false
-            }
-        )""",
+        R"""({
+            "properties": {
+                "b": {"type": "string"},
+                "a": {"type": "string"},
+                "d": {"type": "string"},
+                "c": {"type": "string"}
+            },
+            "required": ["a", "b"],
+            "additionalProperties": false
+        })""",
         // Passing strings
         {
             R"""({"b": "foo", "a": "bar"})""",
@@ -1197,8 +1198,7 @@ static void test_json_schema() {
     test_schema(
         "required props",
         // Schema
-        R"""(
-            {
+        R"""({
             "$schema": "https://json-schema.org/draft/2020-12/schema",
             "$id": "https://example.com/product.schema.json",
             "title": "Product",
@@ -1244,8 +1244,7 @@ static void test_json_schema() {
                 }
             },
             "required": [ "productId", "productName", "price" ]
-            }
-        )""",
+        })""",
         // Passing strings
         {
             R"""({"productId": 1, "productName": "A green door", "price": 12.50})""",
@@ -473,7 +473,7 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
             "const": "foo"
         })""",
         R"""(
-            root ::= "\"foo\""
+            root ::= "\"foo\"" space
             space ::= | " " | "\n" [ \t]{0,20}
         )"""
     });
@@ -485,7 +485,7 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
             "const": 123
         })""",
         R"""(
-            root ::= "123"
+            root ::= "123" space
             space ::= | " " | "\n" [ \t]{0,20}
         )"""
     });
@@ -497,11 +497,43 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
             "enum": ["red", "amber", "green", null, 42, ["foo"]]
         })""",
         R"""(
-            root ::= "\"red\"" | "\"amber\"" | "\"green\"" | "null" | "42" | "[\"foo\"]"
+            root ::= ("\"red\"" | "\"amber\"" | "\"green\"" | "null" | "42" | "[\"foo\"]") space
             space ::= | " " | "\n" [ \t]{0,20}
         )"""
     });
 
+    test({
+        SUCCESS,
+        "string array",
+        R"""({
+            "type": "array",
+            "prefixItems": { "type": "string" }
+        })""",
+        R"""(
+            char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
+            root ::= "[" space (string ("," space string)*)? "]" space
+            space ::= | " " | "\n" [ \t]{0,20}
+            string ::= "\"" char* "\"" space
+        )"""
+    });
+
+    test({
+        SUCCESS,
+        "nullable string array",
+        R"""({
+            "type": ["array", "null"],
+            "prefixItems": { "type": "string" }
+        })""",
+        R"""(
+            alternative-0 ::= "[" space (string ("," space string)*)? "]" space
+            char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
+            null ::= "null" space
+            root ::= alternative-0 | null
+            space ::= | " " | "\n" [ \t]{0,20}
+            string ::= "\"" char* "\"" space
+        )"""
+    });
+
     test({
         SUCCESS,
         "tuple1",
@@ -816,13 +848,12 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
         })""",
         R"""(
             additional-kv ::= string ":" space additional-value
-            additional-kvs ::= additional-kv ( "," space additional-kv )*
             additional-value ::= "[" space (number ("," space number)*)? "]" space
             char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
             decimal-part ::= [0-9]{1,16}
             integral-part ::= [0] | [1-9] [0-9]{0,15}
             number ::= ("-"? integral-part) ("." decimal-part)? ([eE] [-+]? integral-part)? space
-            root ::= "{" space (additional-kvs )? "}" space
+            root ::= "{" space (additional-kv ( "," space additional-kv )* )? "}" space
             space ::= | " " | "\n" [ \t]{0,20}
             string ::= "\"" char* "\"" space
         )"""
@@ -899,13 +930,13 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
         })""",
         R"""(
             a-kv ::= "\"a\"" space ":" space number
-            additional-kv ::= string ":" space string
-            additional-kvs ::= additional-kv ( "," space additional-kv )*
+            additional-k ::= ["] ( [a] char+ | [^"a] char* )? ["] space
+            additional-kv ::= additional-k ":" space string
             char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
             decimal-part ::= [0-9]{1,16}
             integral-part ::= [0] | [1-9] [0-9]{0,15}
             number ::= ("-"? integral-part) ("." decimal-part)? ([eE] [-+]? integral-part)? space
-            root ::= "{" space a-kv ( "," space ( additional-kvs ) )? "}" space
+            root ::= "{" space a-kv ( "," space ( additional-kv ( "," space additional-kv )* ) )? "}" space
             space ::= | " " | "\n" [ \t]{0,20}
             string ::= "\"" char* "\"" space
         )"""
@@ -923,16 +954,15 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
         })""",
         R"""(
             a-kv ::= "\"a\"" space ":" space number
-            a-rest ::= additional-kvs
-            additional-kv ::= string ":" space number
-            additional-kvs ::= additional-kv ( "," space additional-kv )*
+            a-rest ::= ( "," space additional-kv )*
+            additional-k ::= ["] ( [a] char+ | [^"a] char* )? ["] space
+            additional-kv ::= additional-k ":" space number
             char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
             decimal-part ::= [0-9]{1,16}
             integral-part ::= [0] | [1-9] [0-9]{0,15}
             number ::= ("-"? integral-part) ("." decimal-part)? ([eE] [-+]? integral-part)? space
-            root ::= "{" space (a-kv a-rest | additional-kvs )? "}" space
+            root ::= "{" space (a-kv a-rest | additional-kv ( "," space additional-kv )* )? "}" space
             space ::= | " " | "\n" [ \t]{0,20}
-            string ::= "\"" char* "\"" space
         )"""
     });
 
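The expected grammars in these hunks replace the generic string key for additionalProperties with a dedicated additional-k rule that excludes the schema's declared property names, e.g. additional-k ::= ["] ( [a] char+ | [^"a] char* )? ["] space when "a" is declared. A hedged, self-contained illustration of the constraint that rule encodes (this is not the generator's code, just the membership test it implies):

    // An additional property key may be any string except the declared keys.
    #include <cassert>
    #include <set>
    #include <string>

    static bool is_valid_additional_key(const std::string & k, const std::set<std::string> & declared) {
        return declared.count(k) == 0;
    }

    int main() {
        const std::set<std::string> declared = {"a"};
        assert( is_valid_additional_key("ab", declared)); // "a" followed by more chars -> [a] char+
        assert( is_valid_additional_key("b",  declared)); // different first char       -> [^"a] char*
        assert(!is_valid_additional_key("a",  declared)); // exactly a declared key is excluded
        return 0;
    }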
@@ -942,25 +972,100 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
         R"""({
             "type": "object",
             "properties": {
-                "a": {"type": "number"},
-                "b": {"type": "number"}
+                "and": {"type": "number"},
+                "also": {"type": "number"}
             },
-            "required": ["a"],
+            "required": ["and"],
             "additionalProperties": {"type": "number"}
         })""",
         R"""(
-            a-kv ::= "\"a\"" space ":" space number
-            additional-kv ::= string ":" space number
-            additional-kvs ::= additional-kv ( "," space additional-kv )*
-            b-kv ::= "\"b\"" space ":" space number
-            b-rest ::= additional-kvs
+            additional-k ::= ["] ( [a] ([l] ([s] ([o] char+ | [^"o] char*) | [^"s] char*) | [n] ([d] char+ | [^"d] char*) | [^"ln] char*) | [^"a] char* )? ["] space
+            additional-kv ::= additional-k ":" space number
+            also-kv ::= "\"also\"" space ":" space number
+            also-rest ::= ( "," space additional-kv )*
+            and-kv ::= "\"and\"" space ":" space number
             char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
             decimal-part ::= [0-9]{1,16}
             integral-part ::= [0] | [1-9] [0-9]{0,15}
             number ::= ("-"? integral-part) ("." decimal-part)? ([eE] [-+]? integral-part)? space
-            root ::= "{" space a-kv ( "," space ( b-kv b-rest | additional-kvs ) )? "}" space
+            root ::= "{" space and-kv ( "," space ( also-kv also-rest | additional-kv ( "," space additional-kv )* ) )? "}" space
+            space ::= | " " | "\n" [ \t]{0,20}
+        )"""
+    });
+
+    test({
+        SUCCESS,
+        "optional props with empty name",
+        R"""({
+            "properties": {
+                "": {"type": "integer"},
+                "a": {"type": "integer"}
+            },
+            "additionalProperties": {"type": "integer"}
+        })""",
+        R"""(
+            -kv ::= "\"\"" space ":" space root
+            -rest ::= ( "," space a-kv )? a-rest
+            a-kv ::= "\"a\"" space ":" space integer
+            a-rest ::= ( "," space additional-kv )*
+            additional-k ::= ["] ( [a] char+ | [^"a] char* ) ["] space
+            additional-kv ::= additional-k ":" space integer
+            char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
+            integer ::= ("-"? integral-part) space
+            integral-part ::= [0] | [1-9] [0-9]{0,15}
+            root ::= ("-"? integral-part) space
+            root0 ::= "{" space (-kv -rest | a-kv a-rest | additional-kv ( "," space additional-kv )* )? "}" space
+            space ::= | " " | "\n" [ \t]{0,20}
+        )"""
+    });
+
+    test({
+        SUCCESS,
+        "optional props with nested names",
+        R"""({
+            "properties": {
+                "a": {"type": "integer"},
+                "aa": {"type": "integer"}
+            },
+            "additionalProperties": {"type": "integer"}
+        })""",
+        R"""(
+            a-kv ::= "\"a\"" space ":" space integer
+            a-rest ::= ( "," space aa-kv )? aa-rest
+            aa-kv ::= "\"aa\"" space ":" space integer
+            aa-rest ::= ( "," space additional-kv )*
+            additional-k ::= ["] ( [a] ([a] char+ | [^"a] char*) | [^"a] char* )? ["] space
+            additional-kv ::= additional-k ":" space integer
+            char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
+            integer ::= ("-"? integral-part) space
+            integral-part ::= [0] | [1-9] [0-9]{0,15}
+            root ::= "{" space (a-kv a-rest | aa-kv aa-rest | additional-kv ( "," space additional-kv )* )? "}" space
+            space ::= | " " | "\n" [ \t]{0,20}
+        )"""
+    });
+
+    test({
+        SUCCESS,
+        "optional props with common prefix",
+        R"""({
+            "properties": {
+                "ab": {"type": "integer"},
+                "ac": {"type": "integer"}
+            },
+            "additionalProperties": {"type": "integer"}
+        })""",
+        R"""(
+            ab-kv ::= "\"ab\"" space ":" space integer
+            ab-rest ::= ( "," space ac-kv )? ac-rest
+            ac-kv ::= "\"ac\"" space ":" space integer
+            ac-rest ::= ( "," space additional-kv )*
+            additional-k ::= ["] ( [a] ([b] char+ | [c] char+ | [^"bc] char*) | [^"a] char* )? ["] space
+            additional-kv ::= additional-k ":" space integer
+            char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
+            integer ::= ("-"? integral-part) space
+            integral-part ::= [0] | [1-9] [0-9]{0,15}
+            root ::= "{" space (ab-kv ab-rest | ac-kv ac-rest | additional-kv ( "," space additional-kv )* )? "}" space
             space ::= | " " | "\n" [ \t]{0,20}
-            string ::= "\"" char* "\"" space
         )"""
     });
 
@@ -1015,15 +1120,28 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
         R"""(
             alternative-0 ::= foo
             alternative-1 ::= bar
-            bar ::= "{" space (bar-b-kv )? "}" space
+            array ::= "[" space ( value ("," space value)* )? "]" space
+            bar ::= "{" space (bar-b-kv bar-b-rest | bar-additional-kv ( "," space bar-additional-kv )* )? "}" space
+            bar-additional-k ::= ["] ( [b] char+ | [^"b] char* )? ["] space
+            bar-additional-kv ::= bar-additional-k ":" space value
             bar-b-kv ::= "\"b\"" space ":" space number
+            bar-b-rest ::= ( "," space bar-additional-kv )*
+            boolean ::= ("true" | "false") space
+            char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
             decimal-part ::= [0-9]{1,16}
-            foo ::= "{" space (foo-a-kv )? "}" space
+            foo ::= "{" space (foo-a-kv foo-a-rest | foo-additional-kv ( "," space foo-additional-kv )* )? "}" space
             foo-a-kv ::= "\"a\"" space ":" space number
+            foo-a-rest ::= ( "," space foo-additional-kv )*
+            foo-additional-k ::= ["] ( [a] char+ | [^"a] char* )? ["] space
+            foo-additional-kv ::= foo-additional-k ":" space value
             integral-part ::= [0] | [1-9] [0-9]{0,15}
+            null ::= "null" space
             number ::= ("-"? integral-part) ("." decimal-part)? ([eE] [-+]? integral-part)? space
+            object ::= "{" space ( string ":" space value ("," space string ":" space value)* )? "}" space
             root ::= alternative-0 | alternative-1
             space ::= | " " | "\n" [ \t]{0,20}
+            string ::= "\"" char* "\"" space
+            value ::= object | array | string | number | boolean | null
         )"""
     });
 
@@ -1059,15 +1177,25 @@ static void test_all(const std::string & lang, std::function<void(const TestCase
         })""",
         R"""(
             a-kv ::= "\"a\"" space ":" space number
+            additional-k ::= ["] ( [a] char+ | [b] char+ | [c] char+ | [d] char+ | [^"abcd] char* )? ["] space
+            additional-kv ::= additional-k ":" space value
+            array ::= "[" space ( value ("," space value)* )? "]" space
             b-kv ::= "\"b\"" space ":" space number
+            boolean ::= ("true" | "false") space
             c-kv ::= "\"c\"" space ":" space number
+            c-rest ::= ( "," space additional-kv )*
+            char ::= [^"\\\x7F\x00-\x1F] | [\\] (["\\bfnrt] | "u" [0-9a-fA-F]{4})
             d-kv ::= "\"d\"" space ":" space number
-            d-rest ::= ( "," space c-kv )?
+            d-rest ::= ( "," space c-kv )? c-rest
             decimal-part ::= [0-9]{1,16}
             integral-part ::= [0] | [1-9] [0-9]{0,15}
+            null ::= "null" space
             number ::= ("-"? integral-part) ("." decimal-part)? ([eE] [-+]? integral-part)? space
-            root ::= "{" space a-kv "," space b-kv ( "," space ( d-kv d-rest | c-kv ) )? "}" space
+            object ::= "{" space ( string ":" space value ("," space string ":" space value)* )? "}" space
+            root ::= "{" space a-kv "," space b-kv ( "," space ( d-kv d-rest | c-kv c-rest | additional-kv ( "," space additional-kv )* ) )? "}" space
             space ::= | " " | "\n" [ \t]{0,20}
+            string ::= "\"" char* "\"" space
+            value ::= object | array | string | number | boolean | null
         )"""
     });
 