Compare commits
9 commits
master
...
test/dsl-h
Author | SHA1 | Date | |
---|---|---|---|
|
e11a72be16 | ||
|
51e77feee8 | ||
|
d77046d404 | ||
|
7a14f48bd7 | ||
|
c95ff6e188 | ||
|
12b9a7a1be | ||
|
63d7e13e11 | ||
|
2a4a29c051 | ||
|
3219555785 |
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
*.ttf
|
||||||
|
shenikan_server
|
29
Makefile
Normal file
29
Makefile
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
.PHONY: default clean
|
||||||
|
|
||||||
|
EXPORT_FORMAT?=json
|
||||||
|
|
||||||
|
default: tools fonts
|
||||||
|
|
||||||
|
FONT_DIRS=$(wildcard fonts/*.sfdir)
|
||||||
|
FONT_LIST=$(patsubst fonts/%.sfdir,%,$(FONT_DIRS))
|
||||||
|
FONT_TTFS=$(patsubst %,%.ttf,$(FONT_LIST))
|
||||||
|
|
||||||
|
fonts: $(FONT_TTFS)
|
||||||
|
|
||||||
|
%.ttf: fonts/%.sfdir
|
||||||
|
fontforge -lang=ff -c 'Open($$1); Generate($$2)' $^ $@
|
||||||
|
|
||||||
|
tools: shenikan_server
|
||||||
|
|
||||||
|
shenikan_server:
|
||||||
|
go -C tools/server build -o ../../shenikan_server
|
||||||
|
|
||||||
|
dump-dictionary:
|
||||||
|
@cue export --out $(EXPORT_FORMAT) -p shenikan
|
||||||
|
|
||||||
|
print-%:
|
||||||
|
@echo '$* = $($*)'
|
||||||
|
|
||||||
|
clean:
|
||||||
|
-rm $(FONT_TTFS)
|
||||||
|
-rm shenikan_server
|
270
dictionary.cue
Normal file
270
dictionary.cue
Normal file
|
@ -0,0 +1,270 @@
|
||||||
|
package shenikan
|
||||||
|
|
||||||
|
// Tips: in python run
|
||||||
|
//
|
||||||
|
// import gensim.downloader
|
||||||
|
// model = gensim.downloader.load("glove-wiki-gigaword-50") # or other models @https://github.com/piskvorky/gensim-data
|
||||||
|
//
|
||||||
|
// Then find words related to what you want in model[] and do math on them.
|
||||||
|
// Then search through model to find nearby vectors using model.most_similar.
|
||||||
|
// e.g. model.most_similar(model['aunt'] - model['woman'] + model['man'])
|
||||||
|
|
||||||
|
// TODO: try generating a penta for each non-syntactic word in toki pona.
|
||||||
|
// TODO: make more AvB BvC AvBvC penta/icosa sets
|
||||||
|
|
||||||
|
_dictionary: {
|
||||||
|
glyphs: {
|
||||||
|
vowel: "i": ["outer"]
|
||||||
|
vowel: "e": ["outer", "slashed"]
|
||||||
|
vowel: "a": ["slashed"]
|
||||||
|
vowel: "o": ["inner", "slashed"]
|
||||||
|
vowel: "u": ["inner"]
|
||||||
|
vowel: "y": ["inner", "outer"]
|
||||||
|
|
||||||
|
cluster: "θ": ["left", "top"]
|
||||||
|
cluster: "∫": ["left", "middle"]
|
||||||
|
cluster: "x": ["left", "bottom"]
|
||||||
|
cluster: "n": ["center", "middle"]
|
||||||
|
cluster: "p": ["right", "top"]
|
||||||
|
cluster: "t": ["right", "middle"]
|
||||||
|
cluster: "k": ["right", "bottom"]
|
||||||
|
cluster: "θl": ["left", "top", "wide"]
|
||||||
|
cluster: "∫l": ["left", "middle", "wide"]
|
||||||
|
cluster: "xl": ["left", "bottom", "wide"]
|
||||||
|
cluster: "nl": ["center", "middle", "wide"] // use sparingly?
|
||||||
|
cluster: "l": ["right", "top", "wide"] // allophone of pl
|
||||||
|
cluster: "tl": ["right", "middle", "wide"]
|
||||||
|
cluster: "kl": ["right", "bottom", "wide"]
|
||||||
|
cluster: "θr": ["left", "top", "tall"]
|
||||||
|
cluster: "∫r": ["left", "middle", "tall"]
|
||||||
|
cluster: "xr": ["left", "bottom", "tall"]
|
||||||
|
cluster: "nr": ["center", "middle", "tall"] // use sparingly?
|
||||||
|
cluster: "r": ["right", "top", "tall"] // allophone of pr
|
||||||
|
cluster: "tr": ["right", "middle", "tall"]
|
||||||
|
cluster: "kr": ["right", "bottom", "tall"]
|
||||||
|
cluster: "sθ": ["left", "top", "both"]
|
||||||
|
cluster: "s∫": ["left", "middle", "both"]
|
||||||
|
cluster: "sx": ["left", "bottom", "both"]
|
||||||
|
cluster: "sn": ["center", "middle", "both"]
|
||||||
|
cluster: "sp": ["right", "top", "both"]
|
||||||
|
cluster: "st": ["right", "middle", "both"]
|
||||||
|
cluster: "sk": ["right", "bottom", "both"]
|
||||||
|
|
||||||
|
punctuation: "«": ["left"]
|
||||||
|
punctuation: ".": []
|
||||||
|
punctuation: "»": ["right"]
|
||||||
|
|
||||||
|
numeric: "0": ["circle"]
|
||||||
|
numeric: "1": ["dash"]
|
||||||
|
numeric: "2": ["vee"]
|
||||||
|
numeric: "3": ["hump"]
|
||||||
|
numeric: "4": ["dash", "hump"]
|
||||||
|
numeric: "5": ["vee", "hump"]
|
||||||
|
numeric: ",": ["dot"]
|
||||||
|
}
|
||||||
|
|
||||||
|
dialects: {
|
||||||
|
// TODO: make these per-consonant-cluster
|
||||||
|
"shenkian": {
|
||||||
|
"l": "pl"
|
||||||
|
"r": "pr"
|
||||||
|
}
|
||||||
|
|
||||||
|
"jukashenikan": {
|
||||||
|
"x": "ç"
|
||||||
|
"p": "j"
|
||||||
|
}
|
||||||
|
|
||||||
|
"gazhenigan": {
|
||||||
|
"k": "g"
|
||||||
|
"∫": "ʒ"
|
||||||
|
"s": "z"
|
||||||
|
"θ": "ð"
|
||||||
|
"t": "d"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
dms: {
|
||||||
|
"-at": noun: "place of"
|
||||||
|
"plaxi-": noun: "study of"
|
||||||
|
}
|
||||||
|
|
||||||
|
words: {
|
||||||
|
"t": {
|
||||||
|
noun: "thing": "See t - dmPenta for better meaning."
|
||||||
|
verb: "be"
|
||||||
|
}
|
||||||
|
"tit": {
|
||||||
|
noun: "cool"
|
||||||
|
from: "t": via: "-t"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pentas: {
|
||||||
|
"n x": {
|
||||||
|
name: "Pronouns"
|
||||||
|
extremes: {i: "Fully proximal", u: "Fully distal"}
|
||||||
|
|
||||||
|
i: pronoun: "I/me"
|
||||||
|
o: {}
|
||||||
|
}
|
||||||
|
|
||||||
|
"p ls": {
|
||||||
|
name: "Coordinating Conjunctions"
|
||||||
|
extremes: {i: "Fully constrasting", u: "Fully non-contrasting"}
|
||||||
|
|
||||||
|
i: conjunction: "but/yet"
|
||||||
|
e: conjunction: "exclusive or"
|
||||||
|
a: conjunction: "or": "inclusive or"
|
||||||
|
o: conjunction: "implies": "logical implication (not p or q)"
|
||||||
|
u: conjunction: "and"
|
||||||
|
}
|
||||||
|
|
||||||
|
"sp ns": {
|
||||||
|
name: "Group Clusivity"
|
||||||
|
extremes: {i: "Fully included", u: "Fully excluded"}
|
||||||
|
|
||||||
|
i: adposition: "among"
|
||||||
|
e: adposition: "with"
|
||||||
|
a: adposition: "throughout"
|
||||||
|
o: adposition: "without"
|
||||||
|
u: adposition: "out"
|
||||||
|
}
|
||||||
|
|
||||||
|
"r ks": {
|
||||||
|
name: "Process"
|
||||||
|
extremes: {i: "Beginning", u: "Ending"}
|
||||||
|
|
||||||
|
i: {verb: "start", noun: "start"}
|
||||||
|
e: {verb: "resume", noun: "resumption"}
|
||||||
|
a: {verb: "continue", noun: "continuation"}
|
||||||
|
o: {verb: "stop", noun: "stop"}
|
||||||
|
u: {verb: "finish", noun: "finish"}
|
||||||
|
}
|
||||||
|
|
||||||
|
"sp t": {
|
||||||
|
name: "Truth"
|
||||||
|
extremes: {i: "True", u: "False"}
|
||||||
|
|
||||||
|
i: {adjective: "true", noun: "truth"}
|
||||||
|
e: {adjective: "valid", noun: "validity"}
|
||||||
|
a: {
|
||||||
|
adjective: unknown: "Truth value unknown"
|
||||||
|
noun: unknown: "Unknown truth value"
|
||||||
|
}
|
||||||
|
o: {adjective: "invalid", noun: "invalidity"}
|
||||||
|
u: {adjective: "false", noun: "falsehood"}
|
||||||
|
}
|
||||||
|
|
||||||
|
"sp shs": {
|
||||||
|
name: "Biomes"
|
||||||
|
extremes: {i: "Wet Biomes", u: "Dry Biomes"}
|
||||||
|
|
||||||
|
i: noun: ["swamp", "wetlands"]
|
||||||
|
e: noun: "rainforest"
|
||||||
|
a: noun: "forest"
|
||||||
|
o: noun: "grassland"
|
||||||
|
u: noun: "desert"
|
||||||
|
}
|
||||||
|
|
||||||
|
"sth ps": {
|
||||||
|
name: "Generic Plants"
|
||||||
|
extremes: {i: "Trees", u: "Grasses"}
|
||||||
|
|
||||||
|
i: noun: "tree"
|
||||||
|
e: noun: "vine"
|
||||||
|
a: noun: "bush"
|
||||||
|
o: noun: "crop"
|
||||||
|
u: noun: "grass"
|
||||||
|
}
|
||||||
|
|
||||||
|
"k ks": {
|
||||||
|
name: "Queries"
|
||||||
|
extremes: {i: "Abstract", u: "Concrete"}
|
||||||
|
|
||||||
|
i: noun: "why"
|
||||||
|
e: noun: "how"
|
||||||
|
a: noun: "when"
|
||||||
|
o: noun: "where"
|
||||||
|
u: noun: ["what", "which", "who"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
icosas: {
|
||||||
|
"sth sp shs": {
|
||||||
|
name: "Plants"
|
||||||
|
extremes: {
|
||||||
|
ix: "Trees"
|
||||||
|
ux: "Grasses"
|
||||||
|
xi: "Wet"
|
||||||
|
xu: "Dry"
|
||||||
|
}
|
||||||
|
|
||||||
|
ix: "Trees"
|
||||||
|
ex: "Vines"
|
||||||
|
ax: "Bushes"
|
||||||
|
ox: "Crops"
|
||||||
|
ux: "Grasses"
|
||||||
|
|
||||||
|
xi: "Swamp Plants"
|
||||||
|
xe: "Rainforest Plants"
|
||||||
|
xa: "Forest Plants"
|
||||||
|
xo: "Grassland Plants"
|
||||||
|
xu: "Desert Plants"
|
||||||
|
|
||||||
|
ii: noun: "conifer tree"
|
||||||
|
ie: noun: "deciduous tree"
|
||||||
|
ia: noun: "temperate tree"
|
||||||
|
io: noun: "palm tree"
|
||||||
|
iu: noun: "desert tree": "Acacia for example"
|
||||||
|
ei: noun: "lily"
|
||||||
|
ee: noun: "vine"
|
||||||
|
ea: noun: "ivy"
|
||||||
|
eo: noun: "vine flower": "Rose for example"
|
||||||
|
eu: noun: "thornbush"
|
||||||
|
ai: noun: "seaweed"
|
||||||
|
ae: noun: "fern"
|
||||||
|
aa: noun: "bush"
|
||||||
|
ao: noun: "shrub"
|
||||||
|
au: noun: "cactus"
|
||||||
|
oi: noun: "fruit tree"
|
||||||
|
oe: noun: "berry bush"
|
||||||
|
oa: noun: "legume"
|
||||||
|
oo: noun: "cabbage": "Or similar grassland crop"
|
||||||
|
ou: noun: "grain"
|
||||||
|
ui: noun: "moss"
|
||||||
|
ue: noun: "flower": "Specifically ground or \"low\" flowers"
|
||||||
|
ua: noun: "grass"
|
||||||
|
uo: noun: "corn/maize"
|
||||||
|
uu: noun: "lichen"
|
||||||
|
}
|
||||||
|
|
||||||
|
"sth sp sshat": {
|
||||||
|
name: "Biomes"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
dmPentas: {
|
||||||
|
"t -": {
|
||||||
|
name: "Articles"
|
||||||
|
extremes: {i: "Specific", u: "General"}
|
||||||
|
|
||||||
|
i: syntax: "proper noun indicator"
|
||||||
|
e: syntax: "the"
|
||||||
|
a: syntax: "a"
|
||||||
|
o: syntax: ["any", "some"]
|
||||||
|
u: syntax: "all"
|
||||||
|
}
|
||||||
|
|
||||||
|
"k -": {
|
||||||
|
name: "Listing Prefixes"
|
||||||
|
extremes: {i: "Additive", u: "Reductive"}
|
||||||
|
|
||||||
|
i: syntax: "or": "Inclusive or"
|
||||||
|
e: syntax: "or": "Exclusive or"
|
||||||
|
a: syntax: "and"
|
||||||
|
o: syntax: "xnor": "Exclusive nor"
|
||||||
|
u: syntax: "nor": "Neither nor"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
249
dictionary.kdl
249
dictionary.kdl
|
@ -1,249 +0,0 @@
|
||||||
// vim: fdm=marker fmr={,} fileencoding=utf8
|
|
||||||
|
|
||||||
glyphs { // in font encoding and alphabetical order
|
|
||||||
// bare vowels
|
|
||||||
(vowel)glyph "i" dashed="outer"
|
|
||||||
(vowel)glyph "e" dashed="outer" slashed=true
|
|
||||||
(vowel)glyph "a" slashed=true
|
|
||||||
(vowel)glyph "o" dashed="inner" slashed=true
|
|
||||||
(vowel)glyph "u" dashed="inner"
|
|
||||||
(vowel)glyph "y" dashed="both"
|
|
||||||
|
|
||||||
// consonant clusters (note: both core can be written with a dot in most cases)
|
|
||||||
(cluster)glyph "θ" side="left" height="top"
|
|
||||||
(cluster)glyph "∫" side="left" height="middle"
|
|
||||||
(cluster)glyph "x" side="left" height="bottom"
|
|
||||||
(cluster)glyph "n" side="center" height="middle"
|
|
||||||
(cluster)glyph "p" side="right" height="top"
|
|
||||||
(cluster)glyph "t" side="right" height="middle"
|
|
||||||
(cluster)glyph "k" side="right" height="bottom"
|
|
||||||
(cluster)glyph "θl" side="left" height="top" core="tall"
|
|
||||||
(cluster)glyph "∫l" side="left" height="middle" core="tall"
|
|
||||||
(cluster)glyph "xl" side="left" height="bottom" core="tall"
|
|
||||||
(cluster)glyph "nl" side="center" height="middle" core="tall"
|
|
||||||
(cluster)glyph "pl" side="right" height="top" core="tall"
|
|
||||||
(cluster)glyph "tl" side="right" height="middle" core="tall"
|
|
||||||
(cluster)glyph "kl" side="right" height="bottom" core="tall"
|
|
||||||
(cluster)glyph "θr" side="left" height="top" core="wide"
|
|
||||||
(cluster)glyph "∫r" side="left" height="middle" core="wide"
|
|
||||||
(cluster)glyph "xr" side="left" height="bottom" core="wide"
|
|
||||||
(cluster)glyph "nr" side="center" height="middle" core="wide"
|
|
||||||
(cluster)glyph "pr" side="right" height="top" core="wide"
|
|
||||||
(cluster)glyph "tr" side="right" height="middle" core="wide"
|
|
||||||
(cluster)glyph "kr" side="right" height="bottom" core="wide"
|
|
||||||
(cluster)glyph "sθ" side="left" height="top" core="both"
|
|
||||||
(cluster)glyph "s∫" side="left" height="middle" core="both"
|
|
||||||
(cluster)glyph "sx" side="left" height="bottom" core="both"
|
|
||||||
(cluster)glyph "sn" side="center" height="middle" core="both"
|
|
||||||
(cluster)glyph "sp" side="right" height="top" core="both"
|
|
||||||
(cluster)glyph "st" side="right" height="middle" core="both"
|
|
||||||
(cluster)glyph "sk" side="right" height="bottom" core="both"
|
|
||||||
|
|
||||||
// syllables (generated with vim macros [main=@s]: j=«`cYpdWdi"k^Wp^ci(syllable??a0j^dWkJj^`cjmc» q=«Y27pk28@j`c{jjmc» s=«?vowels
n0jV}kyp{{jjmc6@q») }}
|
|
||||||
(syllable)glyph "θi" dashed="outer" side="left" height="top"
|
|
||||||
(syllable)glyph "∫i" dashed="outer" side="left" height="middle"
|
|
||||||
(syllable)glyph "xi" dashed="outer" side="left" height="bottom"
|
|
||||||
(syllable)glyph "ni" dashed="outer" side="center" height="middle"
|
|
||||||
(syllable)glyph "pi" dashed="outer" side="right" height="top"
|
|
||||||
(syllable)glyph "ti" dashed="outer" side="right" height="middle"
|
|
||||||
(syllable)glyph "ki" dashed="outer" side="right" height="bottom"
|
|
||||||
(syllable)glyph "θli" dashed="outer" side="left" height="top" core="tall"
|
|
||||||
(syllable)glyph "∫li" dashed="outer" side="left" height="middle" core="tall"
|
|
||||||
(syllable)glyph "xli" dashed="outer" side="left" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "nli" dashed="outer" side="center" height="middle" core="tall"
|
|
||||||
(syllable)glyph "pli" dashed="outer" side="right" height="top" core="tall"
|
|
||||||
(syllable)glyph "tli" dashed="outer" side="right" height="middle" core="tall"
|
|
||||||
(syllable)glyph "kli" dashed="outer" side="right" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "θri" dashed="outer" side="left" height="top" core="wide"
|
|
||||||
(syllable)glyph "∫ri" dashed="outer" side="left" height="middle" core="wide"
|
|
||||||
(syllable)glyph "xri" dashed="outer" side="left" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "nri" dashed="outer" side="center" height="middle" core="wide"
|
|
||||||
(syllable)glyph "pri" dashed="outer" side="right" height="top" core="wide"
|
|
||||||
(syllable)glyph "tri" dashed="outer" side="right" height="middle" core="wide"
|
|
||||||
(syllable)glyph "kri" dashed="outer" side="right" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "sθi" dashed="outer" side="left" height="top" core="both"
|
|
||||||
(syllable)glyph "s∫i" dashed="outer" side="left" height="middle" core="both"
|
|
||||||
(syllable)glyph "sxi" dashed="outer" side="left" height="bottom" core="both"
|
|
||||||
(syllable)glyph "sni" dashed="outer" side="center" height="middle" core="both"
|
|
||||||
(syllable)glyph "spi" dashed="outer" side="right" height="top" core="both"
|
|
||||||
(syllable)glyph "sti" dashed="outer" side="right" height="middle" core="both"
|
|
||||||
(syllable)glyph "ski" dashed="outer" side="right" height="bottom" core="both"
|
|
||||||
(syllable)glyph "θe" dashed="outer" slashed=true side="left" height="top"
|
|
||||||
(syllable)glyph "∫e" dashed="outer" slashed=true side="left" height="middle"
|
|
||||||
(syllable)glyph "xe" dashed="outer" slashed=true side="left" height="bottom"
|
|
||||||
(syllable)glyph "ne" dashed="outer" slashed=true side="center" height="middle"
|
|
||||||
(syllable)glyph "pe" dashed="outer" slashed=true side="right" height="top"
|
|
||||||
(syllable)glyph "te" dashed="outer" slashed=true side="right" height="middle"
|
|
||||||
(syllable)glyph "ke" dashed="outer" slashed=true side="right" height="bottom"
|
|
||||||
(syllable)glyph "θle" dashed="outer" slashed=true side="left" height="top" core="tall"
|
|
||||||
(syllable)glyph "∫le" dashed="outer" slashed=true side="left" height="middle" core="tall"
|
|
||||||
(syllable)glyph "xle" dashed="outer" slashed=true side="left" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "nle" dashed="outer" slashed=true side="center" height="middle" core="tall"
|
|
||||||
(syllable)glyph "ple" dashed="outer" slashed=true side="right" height="top" core="tall"
|
|
||||||
(syllable)glyph "tle" dashed="outer" slashed=true side="right" height="middle" core="tall"
|
|
||||||
(syllable)glyph "kle" dashed="outer" slashed=true side="right" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "θre" dashed="outer" slashed=true side="left" height="top" core="wide"
|
|
||||||
(syllable)glyph "∫re" dashed="outer" slashed=true side="left" height="middle" core="wide"
|
|
||||||
(syllable)glyph "xre" dashed="outer" slashed=true side="left" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "nre" dashed="outer" slashed=true side="center" height="middle" core="wide"
|
|
||||||
(syllable)glyph "pre" dashed="outer" slashed=true side="right" height="top" core="wide"
|
|
||||||
(syllable)glyph "tre" dashed="outer" slashed=true side="right" height="middle" core="wide"
|
|
||||||
(syllable)glyph "kre" dashed="outer" slashed=true side="right" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "sθe" dashed="outer" slashed=true side="left" height="top" core="both"
|
|
||||||
(syllable)glyph "s∫e" dashed="outer" slashed=true side="left" height="middle" core="both"
|
|
||||||
(syllable)glyph "sxe" dashed="outer" slashed=true side="left" height="bottom" core="both"
|
|
||||||
(syllable)glyph "sne" dashed="outer" slashed=true side="center" height="middle" core="both"
|
|
||||||
(syllable)glyph "spe" dashed="outer" slashed=true side="right" height="top" core="both"
|
|
||||||
(syllable)glyph "ste" dashed="outer" slashed=true side="right" height="middle" core="both"
|
|
||||||
(syllable)glyph "ske" dashed="outer" slashed=true side="right" height="bottom" core="both"
|
|
||||||
(syllable)glyph "θa" slashed=true side="left" height="top"
|
|
||||||
(syllable)glyph "∫a" slashed=true side="left" height="middle"
|
|
||||||
(syllable)glyph "xa" slashed=true side="left" height="bottom"
|
|
||||||
(syllable)glyph "na" slashed=true side="center" height="middle"
|
|
||||||
(syllable)glyph "pa" slashed=true side="right" height="top"
|
|
||||||
(syllable)glyph "ta" slashed=true side="right" height="middle"
|
|
||||||
(syllable)glyph "ka" slashed=true side="right" height="bottom"
|
|
||||||
(syllable)glyph "θla" slashed=true side="left" height="top" core="tall"
|
|
||||||
(syllable)glyph "∫la" slashed=true side="left" height="middle" core="tall"
|
|
||||||
(syllable)glyph "xla" slashed=true side="left" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "nla" slashed=true side="center" height="middle" core="tall"
|
|
||||||
(syllable)glyph "pla" slashed=true side="right" height="top" core="tall"
|
|
||||||
(syllable)glyph "tla" slashed=true side="right" height="middle" core="tall"
|
|
||||||
(syllable)glyph "kla" slashed=true side="right" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "θra" slashed=true side="left" height="top" core="wide"
|
|
||||||
(syllable)glyph "∫ra" slashed=true side="left" height="middle" core="wide"
|
|
||||||
(syllable)glyph "xra" slashed=true side="left" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "nra" slashed=true side="center" height="middle" core="wide"
|
|
||||||
(syllable)glyph "pra" slashed=true side="right" height="top" core="wide"
|
|
||||||
(syllable)glyph "tra" slashed=true side="right" height="middle" core="wide"
|
|
||||||
(syllable)glyph "kra" slashed=true side="right" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "sθa" slashed=true side="left" height="top" core="both"
|
|
||||||
(syllable)glyph "s∫a" slashed=true side="left" height="middle" core="both"
|
|
||||||
(syllable)glyph "sxa" slashed=true side="left" height="bottom" core="both"
|
|
||||||
(syllable)glyph "sna" slashed=true side="center" height="middle" core="both"
|
|
||||||
(syllable)glyph "spa" slashed=true side="right" height="top" core="both"
|
|
||||||
(syllable)glyph "sta" slashed=true side="right" height="middle" core="both"
|
|
||||||
(syllable)glyph "ska" slashed=true side="right" height="bottom" core="both"
|
|
||||||
(syllable)glyph "θo" dashed="inner" slashed=true side="left" height="top"
|
|
||||||
(syllable)glyph "∫o" dashed="inner" slashed=true side="left" height="middle"
|
|
||||||
(syllable)glyph "xo" dashed="inner" slashed=true side="left" height="bottom"
|
|
||||||
(syllable)glyph "no" dashed="inner" slashed=true side="center" height="middle"
|
|
||||||
(syllable)glyph "po" dashed="inner" slashed=true side="right" height="top"
|
|
||||||
(syllable)glyph "to" dashed="inner" slashed=true side="right" height="middle"
|
|
||||||
(syllable)glyph "ko" dashed="inner" slashed=true side="right" height="bottom"
|
|
||||||
(syllable)glyph "θlo" dashed="inner" slashed=true side="left" height="top" core="tall"
|
|
||||||
(syllable)glyph "∫lo" dashed="inner" slashed=true side="left" height="middle" core="tall"
|
|
||||||
(syllable)glyph "xlo" dashed="inner" slashed=true side="left" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "nlo" dashed="inner" slashed=true side="center" height="middle" core="tall"
|
|
||||||
(syllable)glyph "plo" dashed="inner" slashed=true side="right" height="top" core="tall"
|
|
||||||
(syllable)glyph "tlo" dashed="inner" slashed=true side="right" height="middle" core="tall"
|
|
||||||
(syllable)glyph "klo" dashed="inner" slashed=true side="right" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "θro" dashed="inner" slashed=true side="left" height="top" core="wide"
|
|
||||||
(syllable)glyph "∫ro" dashed="inner" slashed=true side="left" height="middle" core="wide"
|
|
||||||
(syllable)glyph "xro" dashed="inner" slashed=true side="left" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "nro" dashed="inner" slashed=true side="center" height="middle" core="wide"
|
|
||||||
(syllable)glyph "pro" dashed="inner" slashed=true side="right" height="top" core="wide"
|
|
||||||
(syllable)glyph "tro" dashed="inner" slashed=true side="right" height="middle" core="wide"
|
|
||||||
(syllable)glyph "kro" dashed="inner" slashed=true side="right" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "sθo" dashed="inner" slashed=true side="left" height="top" core="both"
|
|
||||||
(syllable)glyph "s∫o" dashed="inner" slashed=true side="left" height="middle" core="both"
|
|
||||||
(syllable)glyph "sxo" dashed="inner" slashed=true side="left" height="bottom" core="both"
|
|
||||||
(syllable)glyph "sno" dashed="inner" slashed=true side="center" height="middle" core="both"
|
|
||||||
(syllable)glyph "spo" dashed="inner" slashed=true side="right" height="top" core="both"
|
|
||||||
(syllable)glyph "sto" dashed="inner" slashed=true side="right" height="middle" core="both"
|
|
||||||
(syllable)glyph "sko" dashed="inner" slashed=true side="right" height="bottom" core="both"
|
|
||||||
(syllable)glyph "θu" dashed="inner" side="left" height="top"
|
|
||||||
(syllable)glyph "∫u" dashed="inner" side="left" height="middle"
|
|
||||||
(syllable)glyph "xu" dashed="inner" side="left" height="bottom"
|
|
||||||
(syllable)glyph "nu" dashed="inner" side="center" height="middle"
|
|
||||||
(syllable)glyph "pu" dashed="inner" side="right" height="top"
|
|
||||||
(syllable)glyph "tu" dashed="inner" side="right" height="middle"
|
|
||||||
(syllable)glyph "ku" dashed="inner" side="right" height="bottom"
|
|
||||||
(syllable)glyph "θlu" dashed="inner" side="left" height="top" core="tall"
|
|
||||||
(syllable)glyph "∫lu" dashed="inner" side="left" height="middle" core="tall"
|
|
||||||
(syllable)glyph "xlu" dashed="inner" side="left" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "nlu" dashed="inner" side="center" height="middle" core="tall"
|
|
||||||
(syllable)glyph "plu" dashed="inner" side="right" height="top" core="tall"
|
|
||||||
(syllable)glyph "tlu" dashed="inner" side="right" height="middle" core="tall"
|
|
||||||
(syllable)glyph "klu" dashed="inner" side="right" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "θru" dashed="inner" side="left" height="top" core="wide"
|
|
||||||
(syllable)glyph "∫ru" dashed="inner" side="left" height="middle" core="wide"
|
|
||||||
(syllable)glyph "xru" dashed="inner" side="left" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "nru" dashed="inner" side="center" height="middle" core="wide"
|
|
||||||
(syllable)glyph "pru" dashed="inner" side="right" height="top" core="wide"
|
|
||||||
(syllable)glyph "tru" dashed="inner" side="right" height="middle" core="wide"
|
|
||||||
(syllable)glyph "kru" dashed="inner" side="right" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "sθu" dashed="inner" side="left" height="top" core="both"
|
|
||||||
(syllable)glyph "s∫u" dashed="inner" side="left" height="middle" core="both"
|
|
||||||
(syllable)glyph "sxu" dashed="inner" side="left" height="bottom" core="both"
|
|
||||||
(syllable)glyph "snu" dashed="inner" side="center" height="middle" core="both"
|
|
||||||
(syllable)glyph "spu" dashed="inner" side="right" height="top" core="both"
|
|
||||||
(syllable)glyph "stu" dashed="inner" side="right" height="middle" core="both"
|
|
||||||
(syllable)glyph "sku" dashed="inner" side="right" height="bottom" core="both"
|
|
||||||
(syllable)glyph "θy" dashed="both" side="left" height="top"
|
|
||||||
(syllable)glyph "∫y" dashed="both" side="left" height="middle"
|
|
||||||
(syllable)glyph "xy" dashed="both" side="left" height="bottom"
|
|
||||||
(syllable)glyph "ny" dashed="both" side="center" height="middle"
|
|
||||||
(syllable)glyph "py" dashed="both" side="right" height="top"
|
|
||||||
(syllable)glyph "ty" dashed="both" side="right" height="middle"
|
|
||||||
(syllable)glyph "ky" dashed="both" side="right" height="bottom"
|
|
||||||
(syllable)glyph "θly" dashed="both" side="left" height="top" core="tall"
|
|
||||||
(syllable)glyph "∫ly" dashed="both" side="left" height="middle" core="tall"
|
|
||||||
(syllable)glyph "xly" dashed="both" side="left" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "nly" dashed="both" side="center" height="middle" core="tall"
|
|
||||||
(syllable)glyph "ply" dashed="both" side="right" height="top" core="tall"
|
|
||||||
(syllable)glyph "tly" dashed="both" side="right" height="middle" core="tall"
|
|
||||||
(syllable)glyph "kly" dashed="both" side="right" height="bottom" core="tall"
|
|
||||||
(syllable)glyph "θry" dashed="both" side="left" height="top" core="wide"
|
|
||||||
(syllable)glyph "∫ry" dashed="both" side="left" height="middle" core="wide"
|
|
||||||
(syllable)glyph "xry" dashed="both" side="left" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "nry" dashed="both" side="center" height="middle" core="wide"
|
|
||||||
(syllable)glyph "pry" dashed="both" side="right" height="top" core="wide"
|
|
||||||
(syllable)glyph "try" dashed="both" side="right" height="middle" core="wide"
|
|
||||||
(syllable)glyph "kry" dashed="both" side="right" height="bottom" core="wide"
|
|
||||||
(syllable)glyph "sθy" dashed="both" side="left" height="top" core="both"
|
|
||||||
(syllable)glyph "s∫y" dashed="both" side="left" height="middle" core="both"
|
|
||||||
(syllable)glyph "sxy" dashed="both" side="left" height="bottom" core="both"
|
|
||||||
(syllable)glyph "sny" dashed="both" side="center" height="middle" core="both"
|
|
||||||
(syllable)glyph "spy" dashed="both" side="right" height="top" core="both"
|
|
||||||
(syllable)glyph "sty" dashed="both" side="right" height="middle" core="both"
|
|
||||||
(syllable)glyph "sky" dashed="both" side="right" height="bottom" core="both"
|
|
||||||
|
|
||||||
// punctuation
|
|
||||||
(punctuation)glyph "«" dir="left"
|
|
||||||
(punctuation)glyph "."
|
|
||||||
(punctuation)glyph "»" dir="right"
|
|
||||||
|
|
||||||
// mathematics
|
|
||||||
(math)glyph "0" shape="circle"
|
|
||||||
(math)glyph "1" shape="dash"
|
|
||||||
(math)glyph "2" shape="vee"
|
|
||||||
(math)glyph "3" shape="hump"
|
|
||||||
(math)glyph "4" shape="dash-hump"
|
|
||||||
(math)glyph "5" shape="dash-hump-dash"
|
|
||||||
(math)glyph "." shape="dot"
|
|
||||||
}
|
|
||||||
|
|
||||||
dialect "jukashenikan" {
|
|
||||||
replace "x" with="ç"
|
|
||||||
replace "p" with="j"
|
|
||||||
// ...
|
|
||||||
}
|
|
||||||
|
|
||||||
dialect "gazhenigan" {
|
|
||||||
replace "k" with="g"
|
|
||||||
replace "∫" with="ʒ"
|
|
||||||
replace "s" with="z"
|
|
||||||
replace "θ" with="ð"
|
|
||||||
replace "t" with="d"
|
|
||||||
|
|
||||||
// ...
|
|
||||||
}
|
|
||||||
|
|
||||||
romanization {
|
|
||||||
// TODO
|
|
||||||
}
|
|
||||||
|
|
||||||
dictionary {
|
|
||||||
|
|
||||||
}
|
|
120
dsl.cue
Normal file
120
dsl.cue
Normal file
|
@ -0,0 +1,120 @@
|
||||||
|
// CUE code for defining the DSL
|
||||||
|
package shenikan
|
||||||
|
|
||||||
|
// Generate json output via `cue export -p shenikan`
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
_#ShortDefinition: string
|
||||||
|
_#LongDefinition: {[string]: string}
|
||||||
|
_#AnyDefinition: _#ShortDefinition | _#LongDefinition
|
||||||
|
_#RawDefinition: _#AnyDefinition | [..._#AnyDefinition] | *""
|
||||||
|
|
||||||
|
_dictionary: {
|
||||||
|
glyphs: vowel: [Orthography=_]: [...#VowelAttribute] | *null
|
||||||
|
glyphs: cluster: [Orthography=_]: [...#ClusterAttribute] | *null
|
||||||
|
glyphs: punctuation: [Orthography=_]: [...#PunctuationAttribute] | *null
|
||||||
|
glyphs: numeric: [Orthography=_]: [...#NumericAttribute] | *null
|
||||||
|
|
||||||
|
dialects: [Name=_]: [Old=_]: string
|
||||||
|
|
||||||
|
_#DMDSL: {[#PartOfSpeech]: _#RawDefinition}
|
||||||
|
dms: [Orthography=_]: _#DMDSL
|
||||||
|
dmPentas: [Orthography=_]: {
|
||||||
|
name?: string
|
||||||
|
extremes: {i?: string, u?: string}
|
||||||
|
for v in _vowels {"\(v)": _#DMDSL | *null}
|
||||||
|
}
|
||||||
|
dms: {
|
||||||
|
for o, dmPenta in dmPentas {
|
||||||
|
for v in _vowels
|
||||||
|
let ortho = strings.Replace(o, " ", v, 1)
|
||||||
|
if dmPenta["\(v)"] != null {
|
||||||
|
(ortho): dmPenta["\(v)"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#Derivable: from?: [string]: via: string | [...string]
|
||||||
|
_#WordDSL: {[#PartOfSpeech]: _#RawDefinition, #Derivable}
|
||||||
|
words: [Orthography=_]: _#WordDSL
|
||||||
|
|
||||||
|
_#PentaDSL: {
|
||||||
|
name?: string
|
||||||
|
extremes: {i?: string, u?: string}
|
||||||
|
for v in _vowels {"\(v)": _#WordDSL | *null}
|
||||||
|
#Derivable
|
||||||
|
}
|
||||||
|
pentas: [Orthography=_]: _#PentaDSL
|
||||||
|
words: {
|
||||||
|
for o, penta in pentas {
|
||||||
|
for v in _vowels
|
||||||
|
let ortho = strings.Replace(o, " ", v, 1)
|
||||||
|
if penta["\(v)"] != null {
|
||||||
|
(ortho): penta["\(v)"] & {
|
||||||
|
if penta.from != _|_ {
|
||||||
|
from: {
|
||||||
|
for r, v in penta.from {
|
||||||
|
(ortho): v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
icosas: [Orthography=_]: {
|
||||||
|
name?: string
|
||||||
|
extremes: {ix?: string, ux?: string, xi?: string, xu?: string}
|
||||||
|
for v1 in _vowels
|
||||||
|
for v2 in _vowels {
|
||||||
|
"\(v1)\(v2)": _#WordDSL
|
||||||
|
}
|
||||||
|
for v in _vowels {
|
||||||
|
"\(v)x": string
|
||||||
|
"x\(v)": string
|
||||||
|
}
|
||||||
|
#Derivable
|
||||||
|
}
|
||||||
|
pentas: {
|
||||||
|
for o, icosa in icosas {
|
||||||
|
for v in _vowels {
|
||||||
|
(strings.Replace(o, " ", v, 1)): {
|
||||||
|
if icosa["\(v)x"] != _|_ {name: icosa["\(v)x"]}
|
||||||
|
extremes: {
|
||||||
|
if icosa.extremes.xi != _|_ {i: icosa.extremes.xi}
|
||||||
|
if icosa.extremes.xu != _|_ {u: icosa.extremes.xu}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let o1 = strings.Replace(o, " ", "~", 1)
|
||||||
|
let o2 = strings.Replace(o1, " ", v, 1)
|
||||||
|
(strings.Replace(o2, "~", " ", 1)): {
|
||||||
|
if icosa["x\(v)"] != _|_ {name: icosa["x\(v)"]}
|
||||||
|
extremes: {
|
||||||
|
if icosa.extremes.ix != _|_ {i: icosa.extremes.ix}
|
||||||
|
if icosa.extremes.ux != _|_ {u: icosa.extremes.ux}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
words: {
|
||||||
|
for o, icosa in icosas {
|
||||||
|
for v1 in _vowels
|
||||||
|
for v2 in _vowels
|
||||||
|
let ortho = strings.Replace(strings.Replace(o, " ", v1, 1), " ", v2, 1)
|
||||||
|
if icosa["\(v1)\(v2)"] != null {
|
||||||
|
(ortho): icosa["\(v1)\(v2)"] & {
|
||||||
|
if icosa.from != _|_ {
|
||||||
|
from: {
|
||||||
|
for r, v in icosa.from {
|
||||||
|
(ortho): v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
133
generation.cue
Normal file
133
generation.cue
Normal file
|
@ -0,0 +1,133 @@
|
||||||
|
// CUE code for actually generating the output dictionary
|
||||||
|
package shenikan
|
||||||
|
|
||||||
|
// create { _defn: ..., { _#Definition } } to populate a proper definition
|
||||||
|
_#Definition: {
|
||||||
|
_defn: string | {[string]: string}
|
||||||
|
short: _defn & string | [for brief, long in _defn {brief}][0]
|
||||||
|
_long: *([for brief, long in _defn {long}][0]) | null
|
||||||
|
if _long != null {long: _long}
|
||||||
|
}
|
||||||
|
|
||||||
|
dictionary: {
|
||||||
|
glyphs: [
|
||||||
|
for k, datum in _dictionary.glyphs
|
||||||
|
for o, glyph in datum {
|
||||||
|
kind: k
|
||||||
|
ortho: o
|
||||||
|
attrs: glyph
|
||||||
|
},
|
||||||
|
|
||||||
|
for v, vglyph in _dictionary.glyphs.vowel
|
||||||
|
for c, cglyph in _dictionary.glyphs.cluster {
|
||||||
|
kind: "syllable"
|
||||||
|
ortho: c + v
|
||||||
|
attrs: vglyph + cglyph
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
dialects: [
|
||||||
|
for d, dialect in _dictionary.dialects {
|
||||||
|
name: d
|
||||||
|
replacements: [
|
||||||
|
for o, n in dialect {
|
||||||
|
old: o
|
||||||
|
new: n
|
||||||
|
},
|
||||||
|
]
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
dms: [
|
||||||
|
for o, dm in _dictionary.dms {
|
||||||
|
spelling: o
|
||||||
|
definitions: [
|
||||||
|
for p, defn in dm
|
||||||
|
if (defn & _#AnyDefinition) != _|_ {
|
||||||
|
_defn: defn
|
||||||
|
pos: p
|
||||||
|
{_#Definition}
|
||||||
|
},
|
||||||
|
for p, defns in dm
|
||||||
|
if (defns & [..._#AnyDefinition]) != _|_
|
||||||
|
for def in defns {
|
||||||
|
_defn: def
|
||||||
|
pos: p
|
||||||
|
{_#Definition}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
dmPentas: [
|
||||||
|
for o, dmPenta in _dictionary.dmPentas {
|
||||||
|
spelling: o
|
||||||
|
if dmPenta.name != _|_ {name: dmPenta.name}
|
||||||
|
if dmPenta.extremes != _|_ {
|
||||||
|
extremes: {
|
||||||
|
if dmPenta.extremes.i != _|_ {i: dmPenta.extremes.i}
|
||||||
|
if dmPenta.extremes.u != _|_ {u: dmPenta.extremes.u}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
words: [
|
||||||
|
for o, word in _dictionary.words {
|
||||||
|
spelling: o
|
||||||
|
definitions: [
|
||||||
|
for p, defn in word
|
||||||
|
if (defn & _#AnyDefinition) != _|_
|
||||||
|
if p != "from" {
|
||||||
|
_defn: defn
|
||||||
|
pos: p
|
||||||
|
{_#Definition}
|
||||||
|
},
|
||||||
|
for p, defns in word
|
||||||
|
if p != "from"
|
||||||
|
if (defns & [..._#AnyDefinition]) != _|_
|
||||||
|
for def in defns {
|
||||||
|
_defn: def
|
||||||
|
pos: p
|
||||||
|
{_#Definition}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
if word.from != _|_ {
|
||||||
|
derivations: [
|
||||||
|
for r, v in word.from {
|
||||||
|
root: r
|
||||||
|
via: [v.via & string] | v.via
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
pentas: [
|
||||||
|
for o, penta in _dictionary.pentas {
|
||||||
|
spelling: o
|
||||||
|
if penta.name != _|_ {name: penta.name}
|
||||||
|
if penta.extremes != _|_ {
|
||||||
|
extremes: {
|
||||||
|
if penta.extremes.i != _|_ {i: penta.extremes.i}
|
||||||
|
if penta.extremes.u != _|_ {u: penta.extremes.u}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
icosas: [
|
||||||
|
for o, icosa in _dictionary.icosas {
|
||||||
|
spelling: o
|
||||||
|
if icosa.name != _|_ {name: icosa.name}
|
||||||
|
if icosa.extremes != _|_ {
|
||||||
|
extremes: {
|
||||||
|
if icosa.extremes.ix != _|_ {ix: icosa.extremes.ix}
|
||||||
|
if icosa.extremes.ux != _|_ {ux: icosa.extremes.ux}
|
||||||
|
if icosa.extremes.xi != _|_ {xi: icosa.extremes.xi}
|
||||||
|
if icosa.extremes.xu != _|_ {xu: icosa.extremes.xu}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
6
shenikan.go
Normal file
6
shenikan.go
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
package shenikandata
|
||||||
|
|
||||||
|
import "embed"
|
||||||
|
|
||||||
|
//go:embed *.cue
|
||||||
|
var Cues embed.FS
|
199
tools/esh.py
Executable file
199
tools/esh.py
Executable file
|
@ -0,0 +1,199 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
"""
|
||||||
|
This losely does similar behaviour to asking ChatGPT the following:
|
||||||
|
|
||||||
|
Can you help me find words in your embedding space? I want to give you a basic
|
||||||
|
arithmetic expression involving words to find relationships between words in
|
||||||
|
your embedding model. For example king minus man plus woman should probably be
|
||||||
|
something like queen. Please give me 10 options each time. Are you ready?
|
||||||
|
"""
|
||||||
|
|
||||||
|
import cmd
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
|
||||||
|
from gensim import downloader
|
||||||
|
from thefuzz import process
|
||||||
|
|
||||||
|
EMBEDDING_TOKENS = [
|
||||||
|
('NUMBER', r'\d+(\.\d*)?'), # an integer or decimal number
|
||||||
|
('WORD', r'\w+'), # a word
|
||||||
|
('PAREN', r'[()]'), # a parenthesis
|
||||||
|
('OP', r'[+\-*/~]'), # an arithmetic operator
|
||||||
|
('COMMA', r','), # a comma
|
||||||
|
('WS', r'\s+'), # whitespace
|
||||||
|
('ERROR', r'.'), # anything else
|
||||||
|
]
|
||||||
|
EMBEDDING_TOKENIZATION_RE = re.compile('|'.join(
|
||||||
|
f'(?P<{x[0]}>{x[1]})' for x in EMBEDDING_TOKENS
|
||||||
|
))
|
||||||
|
|
||||||
|
|
||||||
|
def tokenize_embedding_expr(expr):
|
||||||
|
""" Generates (token_kind, token) for each token in expr. """
|
||||||
|
for mo in EMBEDDING_TOKENIZATION_RE.finditer(expr):
|
||||||
|
yield (mo.lastgroup, mo.group())
|
||||||
|
|
||||||
|
|
||||||
|
def token_precedence(token):
|
||||||
|
"""
|
||||||
|
Returns the precedence of the token.
|
||||||
|
Negative precedences are right-associative
|
||||||
|
"""
|
||||||
|
if token in {'+', '-', '~'}:
|
||||||
|
return 1
|
||||||
|
|
||||||
|
if token in {'*', '/'}:
|
||||||
|
return 2
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def _goes_first(a, b):
|
||||||
|
ap = token_precedence(a)
|
||||||
|
bp = token_precedence(b)
|
||||||
|
aap = abs(ap)
|
||||||
|
abp = abs(bp)
|
||||||
|
|
||||||
|
if aap > abp:
|
||||||
|
return True
|
||||||
|
|
||||||
|
if aap == abp and bp > 0:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def shunt_embedding_tokens(tokens):
|
||||||
|
"""
|
||||||
|
Tokens are (kind, value) where kind is:
|
||||||
|
|
||||||
|
w - word to be looked up in model and converted to embedding vector
|
||||||
|
s - scalar value
|
||||||
|
o - operator
|
||||||
|
"""
|
||||||
|
stack = [] # operator stack, just the op itself!
|
||||||
|
|
||||||
|
for (kind, tok) in tokens:
|
||||||
|
if kind == 'WORD':
|
||||||
|
yield ('w', tok)
|
||||||
|
|
||||||
|
elif kind == 'NUMBER':
|
||||||
|
yield ('s', tok)
|
||||||
|
|
||||||
|
elif kind == 'OP':
|
||||||
|
while stack and stack[-1] != '(' and _goes_first(stack[-1], tok):
|
||||||
|
yield ('o', stack.pop())
|
||||||
|
stack.append(tok)
|
||||||
|
|
||||||
|
elif kind == 'PAREN':
|
||||||
|
if tok == '(':
|
||||||
|
stack.append(tok)
|
||||||
|
else:
|
||||||
|
while stack and stack[-1] != '(':
|
||||||
|
yield ('o', stack.pop())
|
||||||
|
|
||||||
|
if stack:
|
||||||
|
stack.pop() # remove the '('
|
||||||
|
|
||||||
|
while stack:
|
||||||
|
yield ('o', stack.pop())
|
||||||
|
|
||||||
|
|
||||||
|
def evaluate_embedding_shunt(shunt, model):
|
||||||
|
""" Evaluates shunt using model. """
|
||||||
|
stack = []
|
||||||
|
|
||||||
|
for (kind, x) in shunt:
|
||||||
|
if kind == 'w':
|
||||||
|
if x[0] == '_':
|
||||||
|
if x[1:] in model:
|
||||||
|
stack.append(-model[x[1:]])
|
||||||
|
else:
|
||||||
|
most_similar = process.extractOne(x[1:], model.key_to_index.keys())[0]
|
||||||
|
stack.append(-model[most_similar])
|
||||||
|
|
||||||
|
if x in model:
|
||||||
|
stack.append(model[x])
|
||||||
|
else:
|
||||||
|
most_similar = process.extractOne(x, model.key_to_index.keys())[0]
|
||||||
|
stack.append(model[most_similar])
|
||||||
|
|
||||||
|
elif kind == 's':
|
||||||
|
stack.append(float(x))
|
||||||
|
|
||||||
|
elif kind == 'o':
|
||||||
|
if x == '+':
|
||||||
|
a = stack.pop()
|
||||||
|
b = stack.pop()
|
||||||
|
stack.append(a + b)
|
||||||
|
|
||||||
|
elif x == '-':
|
||||||
|
a = stack.pop()
|
||||||
|
b = stack.pop()
|
||||||
|
stack.append(b - a)
|
||||||
|
|
||||||
|
elif x == '*':
|
||||||
|
a = stack.pop()
|
||||||
|
b = stack.pop()
|
||||||
|
stack.append(a * b)
|
||||||
|
|
||||||
|
elif x == '/':
|
||||||
|
a = stack.pop()
|
||||||
|
b = stack.pop()
|
||||||
|
stack.append(b / a)
|
||||||
|
|
||||||
|
elif x == '~':
|
||||||
|
a = stack.pop()
|
||||||
|
b = stack.pop()
|
||||||
|
stack.append((a + b) / 2)
|
||||||
|
|
||||||
|
return stack[-1]
|
||||||
|
|
||||||
|
|
||||||
|
class EmbeddingShell(cmd.Cmd):
|
||||||
|
""" Actual embedding shell wrapper. """
|
||||||
|
intro = 'Welcome to the embedding shell. Enter words in an equation to see similar embeddings. Type :help for more information'
|
||||||
|
prompt = '(Ʃ) '
|
||||||
|
|
||||||
|
def __init__(self, *args, model='glove-wiki-gigaword-300', **kwargs):
|
||||||
|
super().__init__(completekey='tab', stdin=None, stdout=None, *args, **kwargs)
|
||||||
|
print('Loading model...', end='', flush=True)
|
||||||
|
self._model = downloader.load(model)
|
||||||
|
self._keys = self._model.key_to_index.keys()
|
||||||
|
print(' DONE')
|
||||||
|
|
||||||
|
def do_exec(self, arg):
|
||||||
|
""" Test """
|
||||||
|
try:
|
||||||
|
result = evaluate_embedding_shunt(shunt_embedding_tokens(tokenize_embedding_expr(arg)), self._model)
|
||||||
|
|
||||||
|
for (word, sim) in self._model.most_similar(result, restrict_vocab=10000):
|
||||||
|
(w, _) = os.get_terminal_size()
|
||||||
|
bar = '-' * int((w - 20) * sim)
|
||||||
|
print(f'{word:10} {bar}')
|
||||||
|
except Exception as e:
|
||||||
|
print("Could not evaluate expression:", e)
|
||||||
|
|
||||||
|
|
||||||
|
def do_shunt(self, arg):
|
||||||
|
for x in shunt_embedding_tokens(tokenize_embedding_expr(arg)):
|
||||||
|
print(x)
|
||||||
|
|
||||||
|
def do_quit(self, arg):
|
||||||
|
""" Exit the embedding shell. """
|
||||||
|
return True
|
||||||
|
|
||||||
|
def precmd(self, line):
|
||||||
|
if not line:
|
||||||
|
return line
|
||||||
|
if line[0] == ':':
|
||||||
|
return line[1:]
|
||||||
|
return 'exec ' + line
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
EmbeddingShell().cmdloop()
|
13
tools/listencoding.sh
Executable file
13
tools/listencoding.sh
Executable file
|
@ -0,0 +1,13 @@
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
shopt -s extglob
|
||||||
|
|
||||||
|
: "${OFFSET:=0}"
|
||||||
|
|
||||||
|
codepoint="$((0xF3A00 + OFFSET * 256))"
|
||||||
|
while read -r glyph_json; do
|
||||||
|
ascii_ortho="$(jq -r '.ortho | gsub("θ"; "th") | gsub("∫"; "sh") | @uri | gsub("%"; "q")' <<<"$glyph_json")"
|
||||||
|
|
||||||
|
printf '%s\t=\t%x\n' "$ascii_ortho" "$codepoint"
|
||||||
|
codepoint="$((codepoint + 1))"
|
||||||
|
done < <(cue export -p shenikan | jq -c '.dictionary.glyphs[]')
|
107
tools/mkemptyfont.sh
Executable file
107
tools/mkemptyfont.sh
Executable file
|
@ -0,0 +1,107 @@
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
shopt -s extglob
|
||||||
|
|
||||||
|
fontdir="$1.sfdir"
|
||||||
|
|
||||||
|
DEFAULT_GLYPH_WIDTH=555
|
||||||
|
|
||||||
|
read -r -p "Font name:" fontname
|
||||||
|
read -r -p "Full name:" fullname
|
||||||
|
read -r -p "Unicode page offset (/256, 0 is 0xF3Axx):" offset
|
||||||
|
|
||||||
|
mkdir "$fontdir"
|
||||||
|
|
||||||
|
cat <<EOF > "$fontdir/font.props"
|
||||||
|
SplineFontDB: 3.2
|
||||||
|
FontName: $fontname
|
||||||
|
FullName: $fullname
|
||||||
|
Weight: Book
|
||||||
|
Copyright: Copyright (C) $(date +%Y), $(id -un)
|
||||||
|
UComments: "$(date +%Y-%M-%d): Created with mkemptyfont.sh"
|
||||||
|
Version: 001.000
|
||||||
|
ItalicAngle: 0
|
||||||
|
UnderlinePosition: -150
|
||||||
|
UnderlineWidth: 50
|
||||||
|
Ascent: 800
|
||||||
|
Descent: 200
|
||||||
|
sfntRevision: 0x00010000
|
||||||
|
LayerCount: 2
|
||||||
|
Layer: 0 0 "Back" 1
|
||||||
|
Layer: 1 0 "Fore" 0
|
||||||
|
DisplaySize: -48
|
||||||
|
AntiAlias: 1
|
||||||
|
FitToEm: 0
|
||||||
|
Encoding: Custom
|
||||||
|
CreationTime: $(date +%s)
|
||||||
|
ModificationTime: $(date +%s)
|
||||||
|
DEI: 91125
|
||||||
|
Lookup: 4 0 1 "'liga' Standard Ligatures in Latin lookup 0" { "'liga' Standard Ligatures in Latin lookup 0-1" } ['liga' ('DFLT' <'dflt' > 'latn' <'dflt' > ) ]
|
||||||
|
EOF
|
||||||
|
|
||||||
|
ord() {
|
||||||
|
LC_CTYPE=C printf '%d' "'$1"
|
||||||
|
}
|
||||||
|
|
||||||
|
codepoint="$((0xF3A00 + offset * 256))"
|
||||||
|
encodingidx=0
|
||||||
|
|
||||||
|
# generate the 'raw' replacement glyphs
|
||||||
|
while read -r glyph_json; do
|
||||||
|
while read -r liga_code_point; do
|
||||||
|
glyph_file="$fontdir/raw$liga_code_point.glyph"
|
||||||
|
if [ ! -e "$glyph_file" ]; then
|
||||||
|
echo -n "Generating raw$liga_code_point..."
|
||||||
|
|
||||||
|
cat <<EOF > "$glyph_file"
|
||||||
|
StartChar: raw$liga_code_point
|
||||||
|
Encoding: $encodingidx $liga_code_point $encodingidx
|
||||||
|
Width: $DEFAULT_GLYPH_WIDTH
|
||||||
|
LayerCount: 2
|
||||||
|
Comment: "Raw glyph for ligature replacement"
|
||||||
|
Colour: ff0000
|
||||||
|
EndChar
|
||||||
|
EOF
|
||||||
|
encodingidx=$((encodingidx + 1))
|
||||||
|
|
||||||
|
echo ' DONE'
|
||||||
|
fi
|
||||||
|
done < <(jq -r '.ortho | gsub("θ"; "th") | gsub("∫"; "sh") | explode[]' <<<"$glyph_json")
|
||||||
|
done < <(cue export -p shenikan | jq -c '.dictionary.glyphs[]')
|
||||||
|
|
||||||
|
# generate the 'real' ligature glyphs
|
||||||
|
while read -r glyph_json; do
|
||||||
|
liga=( )
|
||||||
|
while read -r liga_code_point; do
|
||||||
|
liga+=( "raw$liga_code_point" )
|
||||||
|
done < <(jq -r '.ortho | gsub("θ"; "th") | gsub("∫"; "sh") | explode[]' <<<"$glyph_json")
|
||||||
|
|
||||||
|
ortho="$(jq -r '.ortho' <<<"$glyph_json")"
|
||||||
|
ascii_ortho="$(jq -r '.ortho | gsub("θ"; "th") | gsub("∫"; "sh") | @uri | gsub("%"; "q")' <<<"$glyph_json")"
|
||||||
|
echo -n "Generating $ortho..."
|
||||||
|
|
||||||
|
glyph_file="$fontdir/sh$ascii_ortho.glyph"
|
||||||
|
cat <<EOF > "$glyph_file"
|
||||||
|
StartChar: sh$ascii_ortho
|
||||||
|
Encoding: $encodingidx $codepoint $encodingidx
|
||||||
|
Width: $DEFAULT_GLYPH_WIDTH
|
||||||
|
LayerCount: 2
|
||||||
|
Comment: "Shenikan $ascii_ortho glyph"
|
||||||
|
Ligature2: "'liga' Standard Ligatures in Latin lookup 0-1" ${liga[*]}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
if [ "$(jq '.ortho | length' <<<"$glyph_json")" -gt 1 ]; then
|
||||||
|
liga=( )
|
||||||
|
while read -r liga_code_point; do
|
||||||
|
liga+=( "raw$liga_code_point" )
|
||||||
|
done < <(jq -r '.ortho | explode | reverse | implode | gsub("θ"; "th") | gsub("∫"; "sh") | explode[]' <<<"$glyph_json")
|
||||||
|
|
||||||
|
echo "Ligature2: \"'liga' Standard Ligatures in Latin lookup 0-1\" ${liga[*]}" >> "$glyph_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo 'EndChar' >> "$glyph_file"
|
||||||
|
encodingidx=$((encodingidx + 1))
|
||||||
|
codepoint=$((codepoint + 1))
|
||||||
|
|
||||||
|
echo ' DONE'
|
||||||
|
done < <(cue export -p shenikan | jq -c '.dictionary.glyphs[]')
|
38
tools/server/go.mod
Normal file
38
tools/server/go.mod
Normal file
|
@ -0,0 +1,38 @@
|
||||||
|
module labprogramming.net/shenikan_server
|
||||||
|
|
||||||
|
go 1.23.1
|
||||||
|
|
||||||
|
replace labprogramming.net/shenikan => ../shenikan
|
||||||
|
|
||||||
|
replace labprogramming.net/shenikandata => ../..
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/99designs/gqlgen v0.17.64
|
||||||
|
github.com/vektah/gqlparser/v2 v2.5.22
|
||||||
|
labprogramming.net/shenikan v0.0.0-00010101000000-000000000000
|
||||||
|
)
|
||||||
|
|
||||||
|
require (
|
||||||
|
cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79 // indirect
|
||||||
|
cuelang.org/go v0.10.0 // indirect
|
||||||
|
github.com/agnivade/levenshtein v1.2.0 // indirect
|
||||||
|
github.com/cockroachdb/apd/v3 v3.2.1 // indirect
|
||||||
|
github.com/emicklei/proto v1.13.2 // indirect
|
||||||
|
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
|
||||||
|
github.com/google/uuid v1.6.0 // indirect
|
||||||
|
github.com/gorilla/websocket v1.5.0 // indirect
|
||||||
|
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||||
|
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||||
|
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||||
|
github.com/opencontainers/image-spec v1.1.0 // indirect
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
|
||||||
|
github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0 // indirect
|
||||||
|
github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21 // indirect
|
||||||
|
github.com/sosodev/duration v1.3.1 // indirect
|
||||||
|
golang.org/x/mod v0.20.0 // indirect
|
||||||
|
golang.org/x/net v0.33.0 // indirect
|
||||||
|
golang.org/x/oauth2 v0.22.0 // indirect
|
||||||
|
golang.org/x/text v0.21.0 // indirect
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||||
|
labprogramming.net/shenikandata v0.0.0-00010101000000-000000000000 // indirect
|
||||||
|
)
|
95
tools/server/go.sum
Normal file
95
tools/server/go.sum
Normal file
|
@ -0,0 +1,95 @@
|
||||||
|
cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79 h1:EceZITBGET3qHneD5xowSTY/YHbNybvMWGh62K2fG/M=
|
||||||
|
cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79/go.mod h1:5A4xfTzHTXfeVJBU6RAUf+QrlfTCW+017q/QiW+sMLg=
|
||||||
|
cuelang.org/go v0.10.0 h1:Y1Pu4wwga5HkXfLFK1sWAYaSWIBdcsr5Cb5AWj2pOuE=
|
||||||
|
cuelang.org/go v0.10.0/go.mod h1:HzlaqqqInHNiqE6slTP6+UtxT9hN6DAzgJgdbNxXvX8=
|
||||||
|
github.com/99designs/gqlgen v0.17.64 h1:BzpqO5ofQXyy2XOa93Q6fP1BHLRjTOeU35ovTEsbYlw=
|
||||||
|
github.com/99designs/gqlgen v0.17.64/go.mod h1:kaxLetFxPGeBBwiuKk75NxuI1fe9HRvob17In74v/Zc=
|
||||||
|
github.com/PuerkitoBio/goquery v1.9.3 h1:mpJr/ikUA9/GNJB/DBZcGeFDXUtosHRyRrwh7KGdTG0=
|
||||||
|
github.com/PuerkitoBio/goquery v1.9.3/go.mod h1:1ndLHPdTz+DyQPICCWYlYQMPl0oXZj0G6D4LCYA6u4U=
|
||||||
|
github.com/agnivade/levenshtein v1.2.0 h1:U9L4IOT0Y3i0TIlUIDJ7rVUziKi/zPbrJGaFrtYH3SY=
|
||||||
|
github.com/agnivade/levenshtein v1.2.0/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU=
|
||||||
|
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
|
||||||
|
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
|
||||||
|
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||||
|
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||||
|
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
|
||||||
|
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
|
||||||
|
github.com/cockroachdb/apd/v3 v3.2.1 h1:U+8j7t0axsIgvQUqthuNm82HIrYXodOV2iWLWtEaIwg=
|
||||||
|
github.com/cockroachdb/apd/v3 v3.2.1/go.mod h1:klXJcjp+FffLTHlhIG69tezTDvdP065naDsHzKhYSqc=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54 h1:SG7nF6SRlWhcT7cNTs5R6Hk4V2lcmLz2NsG2VnInyNo=
|
||||||
|
github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
|
||||||
|
github.com/emicklei/proto v1.13.2 h1:z/etSFO3uyXeuEsVPzfl56WNgzcvIr42aQazXaQmFZY=
|
||||||
|
github.com/emicklei/proto v1.13.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
|
||||||
|
github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI=
|
||||||
|
github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
|
||||||
|
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
|
||||||
|
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||||
|
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||||
|
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
|
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
|
||||||
|
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw=
github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0 h1:sadMIsgmHpEOGbUs6VtHBXRR1OHevnj7hLx9ZcdNGW4=
github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0/go.mod h1:jgxiZysxFPM+iWKwQwPR+y+Jvo54ARd4EisXxKYpB5c=
github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21 h1:igWZJluD8KtEtAgRyF4x6lqcxDry1ULztksMJh2mnQE=
github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21/go.mod h1:RMRJLmBOqWacUkmJHRMiPKh1S1m3PA7Zh4W80/kWPpg=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4=
github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/vektah/gqlparser/v2 v2.5.22 h1:yaaeJ0fu+nv1vUMW0Hl+aS1eiv1vMfapBNjpffAda1I=
github.com/vektah/gqlparser/v2 v2.5.22/go.mod h1:xMl+ta8a5M1Yo1A1Iwt/k7gSpscwSnHZdw7tfhEGfTM=
golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
155
tools/server/gqlgen.yml
Normal file
@@ -0,0 +1,155 @@
# Where are all the schema files located? globs are supported eg src/**/*.graphqls
schema:
  - graph/*.graphqls

# Where should the generated server code go?
exec:
  package: graph
  layout: single-file # Only other option is "follow-schema," ie multi-file.

  # Only for single-file layout:
  filename: graph/generated.go

  # Only for follow-schema layout:
  # dir: graph
  # filename_template: "{name}.generated.go"

  # Optional: Maximum number of goroutines in concurrency to use per child resolvers(default: unlimited)
  # worker_limit: 1000

# Uncomment to enable federation
# federation:
#   filename: graph/federation.go
#   package: graph
#   version: 2
#   options:
#     computed_requires: true

# Where should any generated models go?
model:
  filename: graph/model/models_gen.go
  package: model

  # Optional: Pass in a path to a new gotpl template to use for generating the models
  # model_template: [your/path/model.gotpl]

# Where should the resolver implementations go?
resolver:
  package: graph
  layout: follow-schema # Only other option is "single-file."

  # Only for single-file layout:
  # filename: graph/resolver.go

  # Only for follow-schema layout:
  dir: graph
  filename_template: "{name}.resolvers.go"

  # Optional: turn on to not generate template comments above resolvers
  # omit_template_comment: false
  # Optional: Pass in a path to a new gotpl template to use for generating resolvers
  # resolver_template: [your/path/resolver.gotpl]
  # Optional: turn on to avoid rewriting existing resolver(s) when generating
  # preserve_resolver: false

# Optional: turn on use ` + "`" + `gqlgen:"fieldName"` + "`" + ` tags in your models
# struct_tag: json

# Optional: turn on to use []Thing instead of []*Thing
omit_slice_element_pointers: true

# Optional: turn on to omit Is<Name>() methods to interface and unions
# omit_interface_checks: true

# Optional: turn on to skip generation of ComplexityRoot struct content and Complexity function
# omit_complexity: false

# Optional: turn on to not generate any file notice comments in generated files
# omit_gqlgen_file_notice: false

# Optional: turn on to exclude the gqlgen version in the generated file notice. No effect if `omit_gqlgen_file_notice` is true.
# omit_gqlgen_version_in_file_notice: false

# Optional: turn on to exclude root models such as Query and Mutation from the generated models file.
# omit_root_models: false

# Optional: turn on to exclude resolver fields from the generated models file.
# omit_resolver_fields: false

# Optional: turn off to make struct-type struct fields not use pointers
# e.g. type Thing struct { FieldA OtherThing } instead of { FieldA *OtherThing }
# struct_fields_always_pointers: true

# Optional: turn off to make resolvers return values instead of pointers for structs
# resolvers_always_return_pointers: true

# Optional: turn on to return pointers instead of values in unmarshalInput
# return_pointers_in_unmarshalinput: false

# Optional: wrap nullable input fields with Omittable
# nullable_input_omittable: true

# Optional: set to speed up generation time by not performing a final validation pass.
# skip_validation: true

# Optional: set to skip running `go mod tidy` when generating server code
# skip_mod_tidy: true

# Optional: if this is set to true, argument directives that
# decorate a field with a null value will still be called.
#
# This enables argumment directives to not just mutate
# argument values but to set them even if they're null.
call_argument_directives_with_null: true

# Optional: set build tags that will be used to load packages
# go_build_tags:
#  - private
#  - enterprise

# Optional: set to modify the initialisms regarded for Go names
# go_initialisms:
#   replace_defaults: false # if true, the default initialisms will get dropped in favor of the new ones instead of being added
#   initialisms: # List of initialisms to for Go names
#     - 'CC'
#     - 'BCC'

# gqlgen will search for any type names in the schema in these go packages
# if they match it will use them, otherwise it will generate them.
autobind:
#  - "labprogramming.net/shenikan_server/graph/model"

# This section declares type mapping between the GraphQL and go type systems
#
# The first line in each type will be used as defaults for resolver arguments and
# modelgen, the others will be allowed when binding to fields. Configure them to
# your liking
models:
  ID:
    model:
      - github.com/99designs/gqlgen/graphql.ID
      - github.com/99designs/gqlgen/graphql.Int
      - github.com/99designs/gqlgen/graphql.Int64
      - github.com/99designs/gqlgen/graphql.Int32
  # gqlgen provides a default GraphQL UUID convenience wrapper for github.com/google/uuid
  # but you can override this to provide your own GraphQL UUID implementation
  UUID:
    model:
      - github.com/99designs/gqlgen/graphql.UUID

  # The GraphQL spec explicitly states that the Int type is a signed 32-bit
  # integer. Using Go int or int64 to represent it can lead to unexpected
  # behavior, and some GraphQL tools like Apollo Router will fail when
  # communicating numbers that overflow 32-bits.
  #
  # You may choose to use the custom, built-in Int64 scalar to represent 64-bit
  # integers, or ignore the spec and bind Int to graphql.Int / graphql.Int64
  # (the default behavior of gqlgen). This is fine in simple use cases when you
  # do not need to worry about interoperability and only expect small numbers.
  Int:
    model:
      - github.com/99designs/gqlgen/graphql.Int32
  Int64:
    model:
      - github.com/99designs/gqlgen/graphql.Int
      - github.com/99designs/gqlgen/graphql.Int64
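The omit_slice_element_pointers setting above is what makes the generated Dictionary model (later in this diff) hold slices of values rather than slices of pointers. A minimal sketch of the difference, using the repo's own Glyph type:

    // With omit_slice_element_pointers: true (as configured above), gqlgen emits:
    type Dictionary struct {
        Glyphs []Glyph `json:"glyphs"`
    }
    // With the default (false) it would instead emit:
    //     Glyphs []*Glyph `json:"glyphs"`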
6758
tools/server/graph/generated.go
Normal file
File diff suppressed because it is too large
299
tools/server/graph/model/models_gen.go
Normal file
@@ -0,0 +1,299 @@
// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.

package model

import (
    "fmt"
    "io"
    "strconv"
)

// A pentascale of derivational morphologies
type DMPenta struct {
    Spelling string `json:"spelling"`
    Name *string `json:"name,omitempty"`
    Extremes *Extremes `json:"extremes,omitempty"`
}

// A shenikan-english definition
type Definition struct {
    Pos PartOfSpeech `json:"pos"`
    Short string `json:"short"`
    Long *string `json:"long,omitempty"`
}

// A shenikan derivation
type Derivation struct {
    Root string `json:"root"`
    Via []string `json:"via"`
}

// A shenikan derivational morphology
type DerivationalMorphology struct {
    Spelling string `json:"spelling"`
    Definitions []Definition `json:"definitions"`
}

// A shenikan dialect
type Dialect struct {
    Name string `json:"name"`
    Replacements []Replacement `json:"replacements"`
}

// A whole shenikan dictionary.
type Dictionary struct {
    Glyphs []Glyph `json:"glyphs"`
    Dialects []Dialect `json:"dialects"`
    Dms []DerivationalMorphology `json:"dms"`
    DmPentas []DMPenta `json:"dmPentas"`
    Words []Word `json:"words"`
    Pentas []Penta `json:"pentas"`
    Icosas []Icosa `json:"icosas"`
}

// The extremes of an icosapentascale
type DoubleExtremes struct {
    Ix *string `json:"ix,omitempty"`
    Ux *string `json:"ux,omitempty"`
    Xi *string `json:"xi,omitempty"`
    Xu *string `json:"xu,omitempty"`
}

// The extremes of a pentascale
type Extremes struct {
    I *string `json:"i,omitempty"`
    U *string `json:"u,omitempty"`
}

// A glyph in a shenikan dictionary.
type Glyph struct {
    Kind GlyphKind `json:"kind"`
    Ortho string `json:"ortho"`
    Attrs []GlyphAttribute `json:"attrs"`
}

// An icosapentascale of words
type Icosa struct {
    Spelling string `json:"spelling"`
    Name *string `json:"name,omitempty"`
    Extremes *DoubleExtremes `json:"extremes,omitempty"`
}

// A pentascale of words
type Penta struct {
    Spelling string `json:"spelling"`
    Name *string `json:"name,omitempty"`
    Extremes *Extremes `json:"extremes,omitempty"`
}

type Query struct {
}

// A consonant cluster replacement for a dialect
type Replacement struct {
    Old string `json:"old"`
    New string `json:"new"`
}

// A shenikan word
type Word struct {
    Spelling string `json:"spelling"`
    Definitions []Definition `json:"definitions"`
    Derivations []Derivation `json:"derivations"`
}

// The determining attributes of a glyph.
type GlyphAttribute string

const (
    // Marks an outer vowel. (valid for VOWEL/SYLLABLE glyphs)
    GlyphAttributeOuter GlyphAttribute = "OUTER"
    // Marks a slashed vowel. (valid for VOWEL/SYLLABLE glyphs)
    GlyphAttributeSLAShed GlyphAttribute = "SLASHED"
    // Marks an inner vowel. (valid for VOWEL/SYLLABLE glyphs)
    GlyphAttributeInner GlyphAttribute = "INNER"
    // Marks a left consonant cluster or punctuation. (valid for CLUSTER/VOWEL/PUNCTUATION glyphs)
    GlyphAttributeLeft GlyphAttribute = "LEFT"
    // Marks a center consonant cluster. (valid for CLUSTER/VOWEL glyphs)
    GlyphAttributeCenter GlyphAttribute = "CENTER"
    // Marks a right consonant cluster or punctuation. (valid for CLUSTER/VOWEL/PUNCTUATION glyphs)
    GlyphAttributeRight GlyphAttribute = "RIGHT"
    // Marks a top consonant cluster. (valid for CLUSTER/VOWEL glyphs)
    GlyphAttributeTop GlyphAttribute = "TOP"
    // Marks a middle consonant cluster. (valid for CLUSTER/VOWEL glyphs)
    GlyphAttributeMiddle GlyphAttribute = "MIDDLE"
    // Marks a bottom consonant cluster. (valid for CLUSTER/VOWEL glyphs)
    GlyphAttributeBottom GlyphAttribute = "BOTTOM"
    // Marks a tall consonant cluster. (valid for CLUSTER/VOWEL glyphs)
    GlyphAttributeTall GlyphAttribute = "TALL"
    // Marks a wide consonant cluster. (valid for CLUSTER/VOWEL glyphs)
    GlyphAttributeWide GlyphAttribute = "WIDE"
    // Marks a tall and wide consonant cluster. (valid for CLUSTER/VOWEL glyphs)
    GlyphAttributeBoth GlyphAttribute = "BOTH"
    // Marks a circular number. (valid for NUMERIC glyphs)
    GlyphAttributeCircle GlyphAttribute = "CIRCLE"
    // Marks a dash number. (valid for NUMERIC glyphs)
    GlyphAttributeDash GlyphAttribute = "DASH"
    // Marks a vee number. (valid for NUMERIC glyphs)
    GlyphAttributeVee GlyphAttribute = "VEE"
    // Marks a hump number. (valid for NUMERIC glyphs)
    GlyphAttributeHump GlyphAttribute = "HUMP"
    // Marks a dot number. (valid for NUMERIC glyphs)
    GlyphAttributeDot GlyphAttribute = "DOT"
)

var AllGlyphAttribute = []GlyphAttribute{
    GlyphAttributeOuter,
    GlyphAttributeSLAShed,
    GlyphAttributeInner,
    GlyphAttributeLeft,
    GlyphAttributeCenter,
    GlyphAttributeRight,
    GlyphAttributeTop,
    GlyphAttributeMiddle,
    GlyphAttributeBottom,
    GlyphAttributeTall,
    GlyphAttributeWide,
    GlyphAttributeBoth,
    GlyphAttributeCircle,
    GlyphAttributeDash,
    GlyphAttributeVee,
    GlyphAttributeHump,
    GlyphAttributeDot,
}

func (e GlyphAttribute) IsValid() bool {
    switch e {
    case GlyphAttributeOuter, GlyphAttributeSLAShed, GlyphAttributeInner, GlyphAttributeLeft, GlyphAttributeCenter, GlyphAttributeRight, GlyphAttributeTop, GlyphAttributeMiddle, GlyphAttributeBottom, GlyphAttributeTall, GlyphAttributeWide, GlyphAttributeBoth, GlyphAttributeCircle, GlyphAttributeDash, GlyphAttributeVee, GlyphAttributeHump, GlyphAttributeDot:
        return true
    }
    return false
}

func (e GlyphAttribute) String() string {
    return string(e)
}

func (e *GlyphAttribute) UnmarshalGQL(v any) error {
    str, ok := v.(string)
    if !ok {
        return fmt.Errorf("enums must be strings")
    }

    *e = GlyphAttribute(str)
    if !e.IsValid() {
        return fmt.Errorf("%s is not a valid GlyphAttribute", str)
    }
    return nil
}

func (e GlyphAttribute) MarshalGQL(w io.Writer) {
    fmt.Fprint(w, strconv.Quote(e.String()))
}

// The kind of a glyph.
type GlyphKind string

const (
    // A bare vowel cluster.
    GlyphKindVowel GlyphKind = "VOWEL"
    // A consonant cluster.
    GlyphKindCluster GlyphKind = "CLUSTER"
    // A punctuation mark.
    GlyphKindPunctuation GlyphKind = "PUNCTUATION"
    // A numeric or symbolic glyph.
    GlyphKindNumeric GlyphKind = "NUMERIC"
    // A complete syllable cluster.
    GlyphKindSyllable GlyphKind = "SYLLABLE"
)

var AllGlyphKind = []GlyphKind{
    GlyphKindVowel,
    GlyphKindCluster,
    GlyphKindPunctuation,
    GlyphKindNumeric,
    GlyphKindSyllable,
}

func (e GlyphKind) IsValid() bool {
    switch e {
    case GlyphKindVowel, GlyphKindCluster, GlyphKindPunctuation, GlyphKindNumeric, GlyphKindSyllable:
        return true
    }
    return false
}

func (e GlyphKind) String() string {
    return string(e)
}

func (e *GlyphKind) UnmarshalGQL(v any) error {
    str, ok := v.(string)
    if !ok {
        return fmt.Errorf("enums must be strings")
    }

    *e = GlyphKind(str)
    if !e.IsValid() {
        return fmt.Errorf("%s is not a valid GlyphKind", str)
    }
    return nil
}

func (e GlyphKind) MarshalGQL(w io.Writer) {
    fmt.Fprint(w, strconv.Quote(e.String()))
}

// A part of speech
type PartOfSpeech string

const (
    PartOfSpeechNoun PartOfSpeech = "NOUN"
    PartOfSpeechPronoun PartOfSpeech = "PRONOUN"
    PartOfSpeechVerb PartOfSpeech = "VERB"
    PartOfSpeechAdjective PartOfSpeech = "ADJECTIVE"
    PartOfSpeechAdverb PartOfSpeech = "ADVERB"
    PartOfSpeechAdposition PartOfSpeech = "ADPOSITION"
    PartOfSpeechConjunction PartOfSpeech = "CONJUNCTION"
    PartOfSpeechSyntax PartOfSpeech = "SYNTAX"
)

var AllPartOfSpeech = []PartOfSpeech{
    PartOfSpeechNoun,
    PartOfSpeechPronoun,
    PartOfSpeechVerb,
    PartOfSpeechAdjective,
    PartOfSpeechAdverb,
    PartOfSpeechAdposition,
    PartOfSpeechConjunction,
    PartOfSpeechSyntax,
}

func (e PartOfSpeech) IsValid() bool {
    switch e {
    case PartOfSpeechNoun, PartOfSpeechPronoun, PartOfSpeechVerb, PartOfSpeechAdjective, PartOfSpeechAdverb, PartOfSpeechAdposition, PartOfSpeechConjunction, PartOfSpeechSyntax:
        return true
    }
    return false
}

func (e PartOfSpeech) String() string {
    return string(e)
}

func (e *PartOfSpeech) UnmarshalGQL(v any) error {
    str, ok := v.(string)
    if !ok {
        return fmt.Errorf("enums must be strings")
    }

    *e = PartOfSpeech(str)
    if !e.IsValid() {
        return fmt.Errorf("%s is not a valid PartOfSpeech", str)
    }
    return nil
}

func (e PartOfSpeech) MarshalGQL(w io.Writer) {
    fmt.Fprint(w, strconv.Quote(e.String()))
}
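The generated enums above implement gqlgen's marshal/unmarshal pair, so they can also be exercised by hand. A small standalone sketch (my illustration, not part of the diff), using the module path seen in the resolvers below:

    package main

    import (
        "fmt"
        "os"

        "labprogramming.net/shenikan_server/graph/model"
    )

    func main() {
        var attr model.GlyphAttribute
        // UnmarshalGQL accepts the string form used on the wire.
        if err := attr.UnmarshalGQL("SLASHED"); err != nil {
            panic(err)
        }
        fmt.Println(attr.IsValid()) // true
        attr.MarshalGQL(os.Stdout)  // writes the quoted string "SLASHED"
    }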
7
tools/server/graph/resolver.go
Normal file
@@ -0,0 +1,7 @@
package graph

// This file will not be regenerated automatically.
//
// It serves as dependency injection for your app, add any dependencies you require here.

type Resolver struct{}
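The comment above is gqlgen's standard dependency-injection hook. The resolvers in this change read the package-level shenikan.Dict instead; a hypothetical alternative (not part of the commit) would be to inject the dictionary here:

    // Hypothetical sketch only: carry the dictionary as resolver state
    // rather than reading the package-level shenikan.Dict.
    type Resolver struct {
        Dict *shenikan.Dictionary
    }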
142
tools/server/graph/schema.graphqls
Normal file
@@ -0,0 +1,142 @@
type Query {
  dictionary: Dictionary!
}

"A whole shenikan dictionary."
type Dictionary {
  glyphs: [Glyph!]!
  dialects: [Dialect!]!
  dms: [DerivationalMorphology!]!
  dmPentas: [DMPenta!]!
  words: [Word!]!
  pentas: [Penta!]!
  icosas: [Icosa!]!
}

"A glyph in a shenikan dictionary."
type Glyph {
  kind: GlyphKind!
  ortho: String!
  attrs: [GlyphAttribute!]!
}

"The kind of a glyph."
enum GlyphKind {
  "A bare vowel cluster." VOWEL
  "A consonant cluster." CLUSTER
  "A punctuation mark." PUNCTUATION
  "A numeric or symbolic glyph." NUMERIC
  "A complete syllable cluster." SYLLABLE
}

# TODO: group these into, e.g. consonant height, to enforce mutex appropriately?
"The determining attributes of a glyph."
enum GlyphAttribute {
  # Vowel Attributes
  "Marks an outer vowel. (valid for VOWEL/SYLLABLE glyphs)" OUTER
  "Marks a slashed vowel. (valid for VOWEL/SYLLABLE glyphs)" SLASHED
  "Marks an inner vowel. (valid for VOWEL/SYLLABLE glyphs)" INNER

  # Consonant Cluster Attributes (and Punctuation Attributes???)
  "Marks a left consonant cluster or punctuation. (valid for CLUSTER/VOWEL/PUNCTUATION glyphs)" LEFT
  "Marks a center consonant cluster. (valid for CLUSTER/VOWEL glyphs)" CENTER
  "Marks a right consonant cluster or punctuation. (valid for CLUSTER/VOWEL/PUNCTUATION glyphs)" RIGHT
  "Marks a top consonant cluster. (valid for CLUSTER/VOWEL glyphs)" TOP
  "Marks a middle consonant cluster. (valid for CLUSTER/VOWEL glyphs)" MIDDLE
  "Marks a bottom consonant cluster. (valid for CLUSTER/VOWEL glyphs)" BOTTOM
  "Marks a tall consonant cluster. (valid for CLUSTER/VOWEL glyphs)" TALL
  "Marks a wide consonant cluster. (valid for CLUSTER/VOWEL glyphs)" WIDE
  "Marks a tall and wide consonant cluster. (valid for CLUSTER/VOWEL glyphs)" BOTH

  # Numeric Attributes
  "Marks a circular number. (valid for NUMERIC glyphs)" CIRCLE
  "Marks a dash number. (valid for NUMERIC glyphs)" DASH
  "Marks a vee number. (valid for NUMERIC glyphs)" VEE
  "Marks a hump number. (valid for NUMERIC glyphs)" HUMP
  "Marks a dot number. (valid for NUMERIC glyphs)" DOT
}

"A shenikan dialect"
type Dialect {
  name: String!
  replacements: [Replacement!]!
}

"A consonant cluster replacement for a dialect"
type Replacement {
  old: String!
  new: String!
}

"A shenikan derivational morphology"
type DerivationalMorphology {
  spelling: String!
  definitions: [Definition!]!
}

"A shenikan-english definition"
type Definition {
  pos: PartOfSpeech!
  short: String!
  long: String
}

"A part of speech"
enum PartOfSpeech {
  NOUN
  PRONOUN
  VERB
  ADJECTIVE
  ADVERB
  ADPOSITION
  CONJUNCTION
  SYNTAX
}

"A pentascale of derivational morphologies"
type DMPenta {
  spelling: String!
  name: String
  extremes: Extremes
}

"The extremes of a pentascale"
type Extremes {
  i: String
  u: String
}

"A shenikan word"
type Word {
  spelling: String!
  definitions: [Definition!]!
  derivations: [Derivation!]!
}

"A shenikan derivation"
type Derivation {
  root: String!
  via: [String!]!
}

"A pentascale of words"
type Penta {
  spelling: String!
  name: String
  extremes: Extremes
}

"An icosapentascale of words"
type Icosa {
  spelling: String!
  name: String
  extremes: DoubleExtremes
}

"The extremes of an icosapentascale"
type DoubleExtremes {
  ix: String
  ux: String
  xi: String
  xu: String
}
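With this schema, the whole dictionary can be fetched in a single request. An example query (my own illustration, not a file in the commit), written as a Go string constant so it can be dropped straight into a client:

    // Example GraphQL query against the schema above; fields are taken
    // directly from the Dictionary, Glyph, Word, Definition and Derivation types.
    const dictionaryQuery = `
    query {
      dictionary {
        glyphs { kind ortho attrs }
        words {
          spelling
          definitions { pos short long }
          derivations { root via }
        }
      }
    }`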
39
tools/server/graph/schema.resolvers.go
Normal file
@@ -0,0 +1,39 @@
package graph

// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen version v0.17.64

import (
    "context"

    "labprogramming.net/shenikan"
    "labprogramming.net/shenikan_server/graph/model"
)

// Dictionary is the resolver for the dictionary field.
func (r *queryResolver) Dictionary(ctx context.Context) (m *model.Dictionary, err error) {
    if shenikan.Dict == nil {
        return nil, shenikan.DictError
    }

    m = &model.Dictionary{}
    m.Glyphs = make([]model.Glyph, len(shenikan.Dict.Glyphs))
    for i, g := range shenikan.Dict.Glyphs {
        m.Glyphs[i] = model.Glyph{
            Kind: model.GlyphKind(g.Kind),
            Ortho: g.Ortho,
            Attrs: make([]model.GlyphAttribute, len(g.Attrs)),
        }
        for j, a := range g.Attrs {
            m.Glyphs[i].Attrs[j] = model.GlyphAttribute(a)
        }
    }

    return
}

// Query returns QueryResolver implementation.
func (r *Resolver) Query() QueryResolver { return &queryResolver{r} }

type queryResolver struct{ *Resolver }
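The resolver above currently only maps Glyphs; the other Dictionary fields stay at their zero values. A hedged sketch of how the Words mapping could follow the same pattern (my illustration, not code from the commit):

    // Sketch only: map shenikan.Dict.Words into the generated model,
    // mirroring how Glyphs are converted above.
    m.Words = make([]model.Word, len(shenikan.Dict.Words))
    for i, w := range shenikan.Dict.Words {
        m.Words[i] = model.Word{Spelling: w.Spelling}
        for _, d := range w.Definitions {
            m.Words[i].Definitions = append(m.Words[i].Definitions, model.Definition{
                Pos:   model.PartOfSpeech(d.PartOfSpeech),
                Short: d.Short,
                Long:  d.Long,
            })
        }
    }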
3
tools/server/regenerate_gql.sh
Normal file
@@ -0,0 +1,3 @@
#!/bin/bash

go run github.com/99designs/gqlgen generate
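The script presumably runs from tools/server, where gqlgen.yml lives. A hypothetical alternative (not in the commit) would be a go:generate directive in one of the Go files there, so the same regeneration happens via the standard toolchain:

    // Hypothetical alternative to regenerate_gql.sh: place this directive in a
    // Go file under tools/server and run `go generate ./...`.
    //go:generate go run github.com/99designs/gqlgen generate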
142
tools/server/schema.graphqls
Normal file
@@ -0,0 +1,142 @@
type Query {
  dictionary: Dictionary!
}

"A whole shenikan dictionary."
type Dictionary {
  glyphs: [Glyph!]!
  dialects: [Dialect!]!
  dms: [DerivationalMorphology!]!
  dmPentas: [DMPenta!]!
  words: [Word!]!
  pentas: [Penta!]!
  icosas: [Icosa!]!
}

"A glyph in a shenikan dictionary."
type Glyph {
  kind: GlyphKind!
  ortho: String!
  attrs: [GlyphAttribute]!
}

"The kind of a glyph."
enum GlyphKind {
  "A bare vowel cluster." VOWEL
  "A consonant cluster." CLUSTER
  "A punctuation mark." PUNCTUATION
  "A numeric or symbolic glyph." NUMERIC
  "A complete syllable cluster." SYLLABLE
}

# TODO: group these into, e.g. consonant height, to enforce mutex appropriately?
"The determining attributes of a glyph."
enum GlyphAttribute {
  # Vowel Attributes
  "Marks an outer vowel. (valid for VOWEL/SYLLABLE glyphs)" OUTER
  "Marks a slashed vowel. (valid for VOWEL/SYLLABLE glyphs)" SLASHED
  "Marks an inner vowel. (valid for VOWEL/SYLLABLE glyphs)" INNER

  # Consonant Cluster Attributes (and Punctuation Attributes???)
  "Marks a left consonant cluster or punctuation. (valid for CLUSTER/VOWEL/PUNCTUATION glyphs)" LEFT
  "Marks a center consonant cluster. (valid for CLUSTER/VOWEL glyphs)" CENTER
  "Marks a right consonant cluster or punctuation. (valid for CLUSTER/VOWEL/PUNCTUATION glyphs)" RIGHT
  "Marks a top consonant cluster. (valid for CLUSTER/VOWEL glyphs)" TOP
  "Marks a middle consonant cluster. (valid for CLUSTER/VOWEL glyphs)" MIDDLE
  "Marks a bottom consonant cluster. (valid for CLUSTER/VOWEL glyphs)" BOTTOM
  "Marks a tall consonant cluster. (valid for CLUSTER/VOWEL glyphs)" TALL
  "Marks a wide consonant cluster. (valid for CLUSTER/VOWEL glyphs)" WIDE
  "Marks a tall and wide consonant cluster. (valid for CLUSTER/VOWEL glyphs)" BOTH

  # Numeric Attributes
  "Marks a circular number. (valid for NUMERIC glyphs)" CIRCLE
  "Marks a dash number. (valid for NUMERIC glyphs)" DASH
  "Marks a vee number. (valid for NUMERIC glyphs)" VEE
  "Marks a hump number. (valid for NUMERIC glyphs)" HUMP
  "Marks a dot number. (valid for NUMERIC glyphs)" DOT
}

"A shenikan dialect"
type Dialect {
  name: String!
  replacements: [Replacement!]!
}

"A consonant cluster replacement for a dialect"
type Replacement {
  old: String!
  new: String!
}

"A shenikan derivational morphology"
type DerivationalMorphology {
  spelling: String!
  definitions: [Definition!]!
}

"A shenikan-english definition"
type Definition {
  pos: PartOfSpeech!
  short: String!
  long: String
}

"A part of speech"
enum PartOfSpeech {
  NOUN
  PRONOUN
  VERB
  ADJECTIVE
  ADVERB
  ADPOSITION
  CONJUNCTION
  SYNTAX
}

"A pentascale of derivational morphologies"
type DMPenta {
  spelling: String!
  name: String
  extremes: Extremes
}

"The extremes of a pentascale"
type Extremes {
  i: String
  u: String
}

"A shenikan word"
type Word {
  spelling: String!
  definitions: [Definition!]!
  derivations: [Derivation!]!
}

"A shenikan derivation"
type Derivation {
  root: String!
  via: [String!]!
}

"A pentascale of words"
type Penta {
  spelling: String!
  name: String
  extremes: Extremes
}

"An icosapentascale of words"
type Icosa {
  spelling: String!
  name: String
  extremes: DoubleExtremes
}

"The extremes of an icosapentascale"
type DoubleExtremes {
  ix: String
  ux: String
  xi: String
  xu: String
}
43
tools/server/server.go
Normal file
@@ -0,0 +1,43 @@
package main

import (
    "log"
    "net/http"
    "os"

    "github.com/99designs/gqlgen/graphql/handler"
    "github.com/99designs/gqlgen/graphql/handler/extension"
    "github.com/99designs/gqlgen/graphql/handler/lru"
    "github.com/99designs/gqlgen/graphql/handler/transport"
    "github.com/99designs/gqlgen/graphql/playground"
    "github.com/vektah/gqlparser/v2/ast"
    "labprogramming.net/shenikan_server/graph"
)

const defaultPort = "8080"

func main() {
    port := os.Getenv("PORT")
    if port == "" {
        port = defaultPort
    }

    srv := handler.New(graph.NewExecutableSchema(graph.Config{Resolvers: &graph.Resolver{}}))

    srv.AddTransport(transport.Options{})
    srv.AddTransport(transport.GET{})
    srv.AddTransport(transport.POST{})

    srv.SetQueryCache(lru.New[*ast.QueryDocument](1000))

    srv.Use(extension.Introspection{})
    srv.Use(extension.AutomaticPersistedQuery{
        Cache: lru.New[string](100),
    })

    http.Handle("/", playground.Handler("GraphQL playground", "/query"))
    http.Handle("/query", srv)

    log.Printf("connect to http://localhost:%s/ for GraphQL playground", port)
    log.Fatal(http.ListenAndServe(":"+port, nil))
}
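Once the server is running, /query accepts ordinary GraphQL POSTs (the POST transport is registered above). A small standalone client sketch, assuming the default port 8080 (my illustration, not part of the commit):

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "io"
        "net/http"
    )

    func main() {
        // Wrap a query in the standard {"query": "..."} envelope.
        body, _ := json.Marshal(map[string]string{
            "query": `{ dictionary { glyphs { ortho } } }`,
        })
        resp, err := http.Post("http://localhost:8080/query", "application/json", bytes.NewReader(body))
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        out, _ := io.ReadAll(resp.Body)
        fmt.Println(string(out))
    }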
22
tools/shenikan/go.mod
Normal file
@@ -0,0 +1,22 @@
module labprogramming.net/shenikan

go 1.23.1

require (
    cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79 // indirect
    cuelang.org/go v0.10.0 // indirect
    github.com/cockroachdb/apd/v3 v3.2.1 // indirect
    github.com/emicklei/proto v1.13.2 // indirect
    github.com/google/uuid v1.6.0 // indirect
    github.com/mitchellh/go-wordwrap v1.0.1 // indirect
    github.com/opencontainers/go-digest v1.0.0 // indirect
    github.com/opencontainers/image-spec v1.1.0 // indirect
    github.com/pelletier/go-toml/v2 v2.2.2 // indirect
    github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0 // indirect
    github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21 // indirect
    golang.org/x/mod v0.20.0 // indirect
    golang.org/x/net v0.28.0 // indirect
    golang.org/x/oauth2 v0.22.0 // indirect
    golang.org/x/text v0.17.0 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
)
45
tools/shenikan/go.sum
Normal file
@@ -0,0 +1,45 @@
cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79 h1:EceZITBGET3qHneD5xowSTY/YHbNybvMWGh62K2fG/M=
cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79/go.mod h1:5A4xfTzHTXfeVJBU6RAUf+QrlfTCW+017q/QiW+sMLg=
cuelang.org/go v0.10.0 h1:Y1Pu4wwga5HkXfLFK1sWAYaSWIBdcsr5Cb5AWj2pOuE=
cuelang.org/go v0.10.0/go.mod h1:HzlaqqqInHNiqE6slTP6+UtxT9hN6DAzgJgdbNxXvX8=
github.com/cockroachdb/apd/v3 v3.2.1 h1:U+8j7t0axsIgvQUqthuNm82HIrYXodOV2iWLWtEaIwg=
github.com/cockroachdb/apd/v3 v3.2.1/go.mod h1:klXJcjp+FffLTHlhIG69tezTDvdP065naDsHzKhYSqc=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/emicklei/proto v1.13.2 h1:z/etSFO3uyXeuEsVPzfl56WNgzcvIr42aQazXaQmFZY=
github.com/emicklei/proto v1.13.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0 h1:sadMIsgmHpEOGbUs6VtHBXRR1OHevnj7hLx9ZcdNGW4=
github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0/go.mod h1:jgxiZysxFPM+iWKwQwPR+y+Jvo54ARd4EisXxKYpB5c=
github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21 h1:igWZJluD8KtEtAgRyF4x6lqcxDry1ULztksMJh2mnQE=
github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21/go.mod h1:RMRJLmBOqWacUkmJHRMiPKh1S1m3PA7Zh4W80/kWPpg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
170
tools/shenikan/shenikan.go
Normal file
@@ -0,0 +1,170 @@
package shenikan

import (
    "strings"
    "io/fs"

    // "cuelang.org/go/cue"
    "cuelang.org/go/cue/cuecontext"
    "cuelang.org/go/cue/load"

    "labprogramming.net/shenikandata"
)

type Glyph struct {
    Kind string `json:"kind"`
    Ortho string `json:"ortho"`
    Attrs []string `json:"attrs"`
}

type Replacement struct {
    Old string `json:"old"`
    New string `json:"new"`
}

type Dialect struct {
    Name string `json:"name"`
    Replacements []Replacement `json:"replacements"`
}

type Definition struct {
    PartOfSpeech string `json:"pos"`
    Short string `json:"short"`
    Long *string `json:"long"`
}

type DerivationalMorphology struct {
    Spelling string `json:"spelling"`
    Definitions []Definition
}

type PentaExtremes struct {
    I *string `json:"i"`
    U *string `json:"u"`
}

type DMPenta struct {
    Spelling string `json:"spelling"`
    Name *string `json:"name"`
    Extremes *PentaExtremes `json:"extremes"`
}

type Derivation struct {
    Root string `json:"root"`
    Via []string `json:"via"`
}

type Word struct {
    Spelling string `json:"spelling"`
    Definitions []Definition `json:"definitions"`
    Derivations []Derivation `json:"derivations"`
}

type Penta struct {
    Spelling string `json:"spelling"`
    Name *string `json:"name"`
    Extremes *PentaExtremes `json:"extremes"`
}

type IcosaExtremes struct {
    IX *string `json:"ix"`
    UX *string `json:"ux"`
    XI *string `json:"xi"`
    XU *string `json:"xu"`
}

type Icosa struct {
    Spelling string `json:"spelling"`
    Name *string `json:"name"`
    Extremes *IcosaExtremes `json:"extremes"`
}

type Dictionary struct {
    Glyphs []Glyph `json:"glyphs"`
    Dialects []Dialect `json:"dialects"`
    DMs []DerivationalMorphology `json:"dms"`
    DMPentas []DMPenta `json:"dmPentas"`
    Words []Word `json:"words"`
    Pentas []Penta `json:"pentas"`
    Icosas []Icosa `json:"icosas"`
}

type dictionaryData struct {
    Dict *Dictionary `json:"dictionary"`
}

var DictError error
var Dict *Dictionary = func() *Dictionary {
    var overlay map[string]load.Source = make(map[string]load.Source)

    sources, DictError := fs.Glob(shenikandata.Cues, "*")
    if DictError != nil {
        return nil
    }
    for source := range sources {
        content, DictError := shenikandata.Cues.ReadFile(sources[source])
        if DictError != nil {
            return nil
        }
        overlay["/.secretprefix/" + sources[source]] = load.FromBytes(content)
    }

    insts := load.Instances([]string{"."}, &load.Config{
        Dir: "/.secretprefix",
        Env: []string{},
        Overlay: overlay,
    })

    ctx := cuecontext.New()
    vals, DictError := ctx.BuildInstances(insts)
    if DictError != nil {
        return nil
    }

    var dd dictionaryData
    DictError = vals[0].Decode(&dd)

    return dd.Dict
}()

// Canonicalizes a string to unicode shenikan without pr or pl
func Canonicalize(shenikan string) (result string) {
    result = shenikan
    result = strings.ReplaceAll(result, "sh", "∫")
    result = strings.ReplaceAll(result, "th", "θ")
    result = strings.ReplaceAll(result, "`", "«")
    result = strings.ReplaceAll(result, "'", "»")
    result = strings.ReplaceAll(result, "c", "x")
    result = strings.ReplaceAll(result, "ç", "x")
    result = strings.ReplaceAll(result, "ç", "x")
    result = strings.ReplaceAll(result, "pr", "r")
    result = strings.ReplaceAll(result, "pl", "l")
    result = strings.ReplaceAll(result, "j", "p")
    result = strings.ReplaceAll(result, "g", "k")
    result = strings.ReplaceAll(result, "z", "s")
    result = strings.ReplaceAll(result, "d", "t")

    return
}

func RenderCSUR(shenikan string, offset int) (result string) {
    result = Canonicalize(shenikan)

    base := 0xF3A00 + 0x100 * offset
    var longest int = 0
    for _, glyph := range Dict.Glyphs {
        if len(glyph.Ortho) > longest {
            longest = len(glyph.Ortho)
        }
    }

    for length := longest; length > 0; length-- {
        for i, glyph := range Dict.Glyphs {
            if len(glyph.Ortho) == length {
                result = strings.ReplaceAll(result, glyph.Ortho, string(rune(base + i)))
            }
        }
    }

    return
}
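Canonicalize and RenderCSUR are plain functions over the loaded dictionary, so they are easy to exercise directly. A small usage sketch (my own, not part of the diff; RenderCSUR output depends on the loaded glyph list, so it is not asserted):

    package main

    import (
        "fmt"

        "labprogramming.net/shenikan"
    )

    func main() {
        // Digraphs collapse to single letters: "sh" becomes "∫", "th" becomes "θ".
        fmt.Println(shenikan.Canonicalize("shenikan")) // prints "∫enikan"

        // Maps each glyph orthography onto the private-use block starting at
        // U+F3A00 (+0x100 per offset); requires shenikan.Dict to have loaded.
        fmt.Println(shenikan.RenderCSUR("shenikan", 0))
    }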
20
tools/shenikan/shenikan_test.go
Normal file
@@ -0,0 +1,20 @@
package shenikan

import (
    "testing"
)

func TestLoadUp2(t *testing.T) {
    dict, err := LoadDictionary("../../")
    if err != nil {
        t.Fatal("Failed to load ../../:", err)
        return
    }

    if len(dict.Glyphs) == 0 {
        t.Fatal("No glyphs in ../../:", dict)
        return
    }

    t.Log("Loaded", dict)
}
86
types.cue
Normal file
@@ -0,0 +1,86 @@
// CUE definitions for the eventual output dictionary
package shenikan

_vowels: ["i", "e", "a", "o", "u"]
#VowelAttribute: "outer" | "slashed" | "inner"
#ClusterAttribute: "left" | "center" | "right" | "top" | "middle" | "bottom" |
    "tall" | "wide" | "both"
#PunctuationAttribute: "left" | "right"
#NumericAttribute: "circle" | "dash" | "vee" | "hump" | "dot"
#SyllableAttribute: #VowelAttribute | #ClusterAttribute

#Glyph: #VowelGlyph | #ClusterGlyph | #PunctuationGlyph | #NumericGlyph |
    #SyllableGlyph

#VowelGlyph: {kind: "vowel", ortho: string, attrs: [...#VowelAttribute]}
#ClusterGlyph: {kind: "cluster", ortho: string, attrs: [...#ClusterAttribute]}
#PunctuationGlyph: {kind: "punctuation", ortho: string, attrs: [...#PunctuationAttribute]}
#NumericGlyph: {kind: "numeric", ortho: string, attrs: [...#NumericAttribute]}
#SyllableGlyph: {kind: "syllable", ortho: string, attrs: [...#SyllableAttribute]}

#Replacement: {
    old: string
    new: string
}
#Dialect: {
    name: string
    replacements: [...#Replacement]
}

#PartOfSpeech: "noun" | "pronoun" | "verb" | "adjective" | "adverb" | "adposition" | "conjunction" | "syntax"
#Definition: {
    pos: #PartOfSpeech
    short: string
    long?: string
}
#DerivationalMorphology: {
    spelling: string
    definitions: [...#Definition]
}
#DMPenta: {
    spelling: string
    name?: string
    extremes?: {
        i?: string
        u?: string
    }
}
#Derivation: {
    root: string
    via: [...string]
}
#Word: {
    spelling: string
    definitions: [...#Definition]
    derivations: [...#Derivation]
}

#Penta: {
    spelling: string
    name?: string
    extremes?: {
        i?: string
        u?: string
    }
}

#Icosa: {
    spelling: string
    name?: string
    extremes?: {
        ix?: string
        ux?: string
        xi?: string
        xu?: string
    }
}

dictionary: {
    glyphs: [...#Glyph]
    dialects: [...#Dialect]
    dms: [...#DerivationalMorphology]
    dmPentas: [...#DMPenta]
    words: [...#Word]
    pentas: [...#Penta]
    icosas: [...#Icosa]
}
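For reference, this is how a single entry conforming to #Word would decode into the Go structs in tools/shenikan/shenikan.go. The concrete word here is invented purely for illustration:

    // Illustrative only: a value shaped like one element of dictionary.words,
    // as it would appear after vals[0].Decode in shenikan.go.
    long := "an example gloss, invented for this sketch"
    example := shenikan.Word{
        Spelling: "θika", // hypothetical spelling, not from the dictionary
        Definitions: []shenikan.Definition{
            {PartOfSpeech: "noun", Short: "example", Long: &long},
        },
        Derivations: []shenikan.Derivation{
            {Root: "θik", Via: []string{"a"}},
        },
    }
    _ = example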