m

parent 22ee15654f
commit 7c4d0c0e34

2  .gitignore (vendored)
@@ -1,6 +1,8 @@
# Devenv
.devenv*
devenv.local.nix
db
datasets

# direnv
.direnv
BIN  data/data.db (binary file not shown)
BIN  data/data.db-shm (binary file not shown)
167  data/schema.sql
@@ -1,167 +0,0 @@
-- Enable foreign key support
PRAGMA foreign_keys = ON;
PRAGMA journal_mode = WAL;
PRAGMA cache_size = -2000;
PRAGMA mmap_size = 30000000000;


-- Words table
CREATE TABLE words (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  spelling TEXT NOT NULL,
  ipa TEXT NOT NULL,
  language_id INTEGER NOT NULL,
  frequency INTEGER,
  FOREIGN KEY (language_id) REFERENCES languages(id)
);
CREATE INDEX idx_words_spelling ON words(spelling);
CREATE INDEX idx_words_language_id ON words(language_id);


-- Languages table
CREATE TABLE languages (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  name TEXT NOT NULL
);

-- Parts of Speech table
CREATE TABLE parts_of_speech (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  name TEXT NOT NULL
);

-- Categories table (for noun and verb categories)
CREATE TABLE categories (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  name TEXT NOT NULL,
  part_of_speech_id INTEGER NOT NULL,
  FOREIGN KEY (part_of_speech_id) REFERENCES parts_of_speech(id)
);
CREATE INDEX idx_categories_name ON categories(name);
CREATE INDEX idx_categories_part_of_speech_id ON categories(part_of_speech_id);

-- Word Categories junction table
CREATE TABLE word_categories (
  word_id INTEGER NOT NULL,
  category_id INTEGER NOT NULL,
  PRIMARY KEY (word_id, category_id),
  FOREIGN KEY (word_id) REFERENCES words(id),
  FOREIGN KEY (category_id) REFERENCES categories(id)
);
CREATE INDEX idx_word_categories_category_id ON word_categories(category_id);

-- Example data insertion
INSERT INTO languages (name) VALUES ('en-us');
INSERT INTO languages (name) VALUES ('th');
INSERT INTO languages (name) VALUES ('zh-cn');
INSERT INTO languages (name) VALUES ('zh-hk');
INSERT INTO languages (name) VALUES ('ja-jp');

INSERT INTO parts_of_speech (name) VALUES ('noun'), ('verb'), ('adjective'), ('adverb'), ('pronoun'), ('adposition'), ('conjunction');

INSERT INTO categories (name, part_of_speech_id) VALUES
  ('countable', 1),
  ('uncountable', 1),
  ('animate', 1),
  ('inanimate', 1),
  ('spatial', 1),
  ('temporal', 1),
  ('transitive', 2),
  ('intransitive', 2),
  ('action', 2),
  ('mental', 2),
  ('auxiliar', 2),
  ('preposition', 6),
  ('postposition', 6),
  ('circumposition', 6);

-- Example word insertion
INSERT INTO words (spelling, ipa, language_id) VALUES ('book', 'bʊk', 1);

-- Categorize 'book' as a countable, inanimate noun
INSERT INTO word_categories (word_id, category_id)
SELECT
  (SELECT id FROM words WHERE spelling = 'book'),
  id
FROM categories
WHERE name IN ('countable', 'inanimate');

-- Example verb insertion
INSERT INTO words (spelling, ipa, language_id) VALUES ('think','θɪŋk', 1);

-- Categorize 'think' as an intransitive, mental verb
INSERT INTO word_categories (word_id, category_id)
SELECT
  (SELECT id FROM words WHERE spelling = 'think'),
  id
FROM categories
WHERE name IN ('intransitive', 'mental');

-- multi word stuff
CREATE TABLE idioms(
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  text TEXT NOT NULL,
  language_id INTEGER NOT NULL,
  frequency INTEGER,
  FOREIGN KEY (language_id) REFERENCES languages(id)
);
CREATE TABLE idioms_words(
  idiom_id INTEGER NOT NULL,
  word_id INTEGER NOT NULL,
  PRIMARY KEY (idiom_id, word_id),
  FOREIGN KEY (word_id) REFERENCES words(id),
  FOREIGN KEY (idiom_id) REFERENCES idioms(id)
);

-- phrasal verbs, other sui generis stuff
CREATE TABLE expressions(
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  text TEXT NOT NULL,
  type TEXT NOT NULL, -- phrasal-verb, result-complement etc.
  language_id INTEGER NOT NULL,
  FOREIGN KEY (language_id) REFERENCES languages(id)
);
CREATE TABLE expression_words(
  expression_id INTEGER NOT NULL,
  word_id INTEGER NOT NULL,
  PRIMARY KEY (expression_id, word_id),
  FOREIGN KEY (expression_id) REFERENCES expressions(id),
  FOREIGN KEY (word_id) REFERENCES words(id)
);


-- Progress
CREATE TABLE users(
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  name TEXT NOT NULL,
  creds TEXT NOT NULL
);
CREATE TABLE attempts(
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id INTEGER NOT NULL,
  timestamp INTEGER NOT NULL,
  resource_id INTEGER NOT NULL,
  resources_type TEXT NOT NULL, -- name of the table, words, expressions, lessons etc.
  good INTEGER NOT NULL, -- 0 or 1
  FOREIGN KEY (user_id) REFERENCES users(id)
);

-- Index to query attempts on a specific resource
CREATE INDEX idx_attempts_resource ON attempts(resource_id, resources_type);

-- Index to query attempts for a specific user
CREATE INDEX idx_attempts_user ON attempts(user_id);

-- (Optional) Index to query attempts by user and resource (useful if you often query by both)
CREATE INDEX idx_attempts_user_resource ON attempts(user_id, resource_id, resources_type);
-- Lessons
CREATE TABLE lessons(
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  text TEXT NOT NULL
);
CREATE TABLE lessons_resources(
  resource_id INTEGER NOT NULL,
  resources_type TEXT NOT NULL, -- name of the table, words, expressions etc.
  lesson_id INTEGER NOT NULL,
  FOREIGN KEY (lesson_id) REFERENCES lessons(id)
);
14  devenv.lock
@@ -53,16 +53,16 @@
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1716977621,
        "owner": "cachix",
        "repo": "devenv-nixpkgs",
        "rev": "4267e705586473d3e5c8d50299e71503f16a6fb6",
        "lastModified": 1729265718,
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "ccc0c2126893dd20963580b6478d1a10a4512185",
        "type": "github"
      },
      "original": {
        "owner": "cachix",
        "ref": "rolling",
        "repo": "devenv-nixpkgs",
        "owner": "nixos",
        "ref": "nixpkgs-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
@@ -1,7 +1,8 @@
# yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
inputs:
  nixpkgs:
    url: github:cachix/devenv-nixpkgs/rolling
    # url: github:cachix/devenv-nixpkgs/rolling
    url: github:nixos/nixpkgs/nixpkgs-unstable

# If you're using non-OSS software, you can set allowUnfree to true.
# allowUnfree: true
242  server/db.ts (Normal file)
@@ -0,0 +1,242 @@
import { Database } from 'bun:sqlite';
import { wordFactorial } from './utils';

// read

export function fetchResource(db: Database, spelling: string) {
  const query = db.query(`
    SELECT
      spelling,
      ipa,
      frequency,
      type,
      subtype,
      GROUP_CONCAT(c.name, ',') AS category
    FROM expressions
    JOIN word_categories wc ON wc.word_id = expressions.id
    JOIN categories c ON c.id = wc.category_id
    WHERE spelling = $spelling
    GROUP BY expressions.id
  `);
  return query.get({ spelling });
}

export function fetchFrequent(db: Database, count: number, page: number) {
  const offset = (page - 1) * count;
  const query = db.query(`
    SELECT
      spelling,
      ipa,
      frequency,
      GROUP_CONCAT(c.name, ',') AS category
    FROM expressions e
    JOIN word_categories wc ON wc.word_id = e.id
    JOIN categories c ON c.id = wc.category_id
    ORDER BY e.frequency DESC
    LIMIT $count
    OFFSET $offset
  `);
  return query.get({ count, offset });
}

export function fetchLessons(db: Database, count: number, page: number) {
  const p = page < 1 ? 1 : page;
  const offset = (p - 1) * count;
  const queryString = `
    SELECT
      l.id, l.text as ltext, cards.text as ctext, cards.note as cnote, cards.id as cid
    FROM cards_lessons cl
    JOIN lessons l ON l.id = cl.lesson_id
    JOIN cards ON cards.id = cl.card_id
    LIMIT $count
    OFFSET $offset
  `;
  const query = db.query(queryString);
  const res = query.all({ count, offset });
  return res;
}

// SELECT l.id, l.text, cards.text, cards.note FROM cards_lessons cl LEFT JOIN lessons l ON l.id = cl.lesson_id LEFT JOIN cards ON cards.id = cl.card_id ORDER BY l.id ASC LIMIT 20 OFFSET 0;
export function fetchLesson(db: Database, lesson: number) {
  const queryString = `
    SELECT
      l.id, l.text, cards.text, cards.note, cards.id as cid
    FROM cards_lessons cl
    JOIN lessons l ON l.id = cl.lesson_id
    JOIN cards ON cards.id = cl.card_id
    WHERE l.id = $lesson
  `;
  const query = db.query(queryString);
  return query.all({ lesson });
}

export function fetchCard(db: Database, cid: number, userid: number) {
  const query = db.query(`
    SELECT
      l.id, l.text, cards.text, cards.note
    FROM cards_lessons cl
    JOIN lessons l ON l.id = cl.lesson_id
    JOIN cards ON cards.id = cl.card_id
    JOIN attempts a ON a.card_id = cards.id AND a.user_id = $userid
    WHERE cards.id = $cid
  `);
  return query.all({ cid, userid });
}

// write
export function addLesson(db: Database, text: string) {
  const query = db.query(`
    INSERT
    INTO lessons(text)
    VALUES($text)
  `);
  const res = query.run({ text });
  return res.lastInsertRowid;
}

export function addCard(
  db: Database,
  lesson_id: number | bigint | null,
  text: string,
  mnote?: string,
) {
  const note = mnote ? mnote : null;
  const query = db.query(`
    INSERT
    INTO cards(text, note)
    VALUES($text, $note)
  `);
  const params = { text, note };
  const res = query.run(params);
  const cid = res.lastInsertRowid;
  const wquery = db.query(`
    INSERT OR IGNORE
    INTO cards_expressions(card_id, expression_id)
    VALUES($cid, (
      SELECT id FROM expressions e
      WHERE e.spelling LIKE $spelling
    ))
  `);
  const wtr = db.transaction((pairs) => {
    for (const pair of pairs) wquery.run(pair);
  });
  const words = text
    .replace(/[^\w\s]/g, '')
    .replace(/\s+/g, ' ')
    .trim()
    .split(' ');
  const combinations = wordFactorial(words);
  const richWords = combinations.map((spelling) => {
    return { spelling, cid };
  });
  wtr(richWords);
  if (lesson_id) {
    const query = db.query(`
      INSERT INTO cards_lessons(card_id, lesson_id)
      VALUES($cid, $lesson_id)
    `);
    query.run({ lesson_id, cid });
  }
}

export function addUser(db: Database, name: string, creds: string) {
  const query = db.query(`
    INSERT
    INTO users(name, creds)
    VALUES($name, $creds)
  `);
  query.run({ $name: name, $creds: creds });
}

export function addWord(
  db: Database,
  spelling: string,
  ipa: string,
  language: string,
  type: string,
  subtype: string | null,
) {
  const queryString = `
    INSERT
    INTO expressions(spelling, ipa, type, subtype, frequency, language_id)
    VALUES($spelling, $ipa, $type, $subtype, 0, (
      SELECT id FROM languages
      WHERE name = $language
    ))
  `;
  const query = db.prepare(queryString);
  const res = query.run({ spelling, ipa, language, type, subtype });
  return res.lastInsertRowid;
}

export function addCat(db: Database, wordId: number | bigint, domain: string) {
  const queryString = `
    INSERT
    INTO word_categories(word_id, category_id)
    VALUES($wordId, (
      SELECT id FROM categories
      WHERE name = $category
    ))
  `;
  const category = domains[domain] || 'unknown';
  const query = db.query(queryString);
  const res = query.run({ wordId, category });
  return res.lastInsertRowid;
}

const domains: Record<string, string> = {
  'adj.all': 'adjective',
  'adj.pert': 'adjective',
  'adj.ppl': 'adjective',
  'adv.all': 'adverb',
  'noun.Tops': '',
  'noun.act': 'abstract',
  'noun.animal': 'animate',
  'noun.artifact': 'inanimate',
  'noun.attribute': 'abstract',
  'noun.body': 'inanimate',
  'noun.cognition': 'abstract',
  'noun.communication': 'abstract',
  'noun.event': 'abstract',
  'noun.feeling': 'abstract',
  'noun.food': 'inanimate',
  'noun.group': 'noun',
  'noun.location': 'spatial',
  'noun.motive': 'abstract',
  'noun.object': 'inanimate',
  'noun.person': 'animate',
  'noun.phenomenon': 'abstract',
  'noun.plant': 'noun',
  'noun.possession': 'noun',
  'noun.process': 'noun',
  'noun.quantity': 'uncountable',
  'noun.relation': 'noun',
  'noun.shape': 'noun',
  'noun.state': 'noun',
  'noun.substance': 'uncountable',
  'noun.time': 'temporal',
  'verb.body': 'verb',
  'verb.change': 'verb',
  'verb.cognition': 'verb',
  'verb.communication': 'verb',
  'verb.competition': 'verb',
  'verb.consumption': 'verb',
  'verb.contact': 'verb',
  'verb.creation': 'verb',
  'verb.emotion': 'mental',
  'verb.motion': 'verb',
  'verb.perception': 'mental',
  'verb.possession': 'verb',
  'verb.social': 'verb',
  'verb.stative': 'verb',
  'verb.weather': 'verb',
};

export function addFrequency(
  db: Database,
  spelling: string,
  frequency: number,
) {
  const queryString = `
    UPDATE expressions
    SET frequency = $frequency
    WHERE expressions.spelling = $spelling
  `;
  const query = db.query(queryString);
  const res = query.run({ spelling, frequency });
  console.log(res, 'added frequency');
}
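The write and read helpers above are meant to compose: addLesson returns the new row id, addCard attaches a card to that lesson and links it to matching expressions, and fetchLessons pages through the result. A minimal usage sketch, not part of the commit; the lesson and card texts are made up, the database path is the one the commit uses elsewhere:

  import { Database } from 'bun:sqlite';
  import { addLesson, addCard, fetchLessons } from './db';

  const db = new Database('../db/data.db', { strict: true });

  // Create a lesson, attach one card to it, then fetch the first page (20 rows).
  const lessonId = addLesson(db, 'Greetings');           // returns lastInsertRowid
  addCard(db, lessonId, 'good morning', 'a common greeting');
  const firstPage = fetchLessons(db, 20, 1);
  console.log(firstPage);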
4  server/hanchu.d.ts (vendored, Normal file)
@@ -0,0 +1,4 @@
declare module 'wordnet' {
  const value: any;
  export default value;
}
@@ -7,5 +7,7 @@
  },
  "peerDependencies": {
    "typescript": "^5.0.0"
  },
  "dependencies": {
  }
}
143  server/seeding.ts (Normal file)
@@ -0,0 +1,143 @@
import { Database } from 'bun:sqlite';
import { addCard, addCat, addFrequency, addLesson, addWord } from './db';
import Wordnet from 'en-wordnet';

// const db = new Database('../db/data.db');
const db = new Database('../db/data.db', { strict: true });
const wndb = new Database('../datasets/en-wordnet/data.sqlite');
db.exec('PRAGMA journal_mode = WAL;');

const SYMBOL_REGEX = new RegExp(/[\W\d]/);

// async function englishIPA() {
//   const file = Bun.file('ipa/en-us/ipadict.txt');
//   const s = file.stream();
//   const reader = s.getReader();
//   const decoder = new TextDecoder();
//   let leftover = '';
//   while (true) {
//     const { value, done } = await reader.read();
//     if (done) break;
//     const chunk = decoder.decode(value, { stream: true });
//     const lines = (leftover + chunk).split('\n');

//     // Process each line except the last (which might be incomplete)
//     for (const line of lines.slice(0, -1)) saveLine(line);

//     // Save the last incomplete line to process in the next iteration
//     leftover = lines[lines.length - 1];
//   }

//   // Handle any remaining content after reading all chunks
//   if (leftover) saveLine(leftover);
// }

async function englishFreq() {
  const file = Bun.file('../datasets/unigram_freq.csv');
  const s = file.stream();
  const reader = s.getReader();
  const decoder = new TextDecoder();
  let leftover = '';
  let lineCount = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    const chunk = decoder.decode(value, { stream: true });
    const lines = (leftover + chunk).split('\n');

    // Process each line except the last (which might be incomplete)
    for (const line of lines.slice(0, -1)) {
      lineCount++;
      const [spelling, _frequency] = line.split(',');
      addFrequency(db, spelling, lineCount);
    }

    // Save the last incomplete line to process in the next iteration
    leftover = lines[lines.length - 1];
  }

  // Handle any remaining content after reading all chunks
  if (leftover) addFrequency(db, leftover, lineCount + 1);
}

// TODO no conjunctions or adpositions in Wordnet!!
function englishIPA() {
  const queryString = `
    SELECT words.wordid, word, pronunciation, domainname FROM words
    JOIN lexes_pronunciations lp ON lp.wordid = words.wordid
    JOIN pronunciations pr ON pr.pronunciationid = lp.pronunciationid
    JOIN senses ON senses.wordid = words.wordid
    JOIN synsets ON synsets.synsetid = senses.synsetid
    JOIN domains ON domains.domainid = synsets.domainid
    GROUP BY words.wordid
  `;
  const query = wndb.query(queryString);
  const res: Array<{
    word: string;
    pronunciation: string;
    domainname: string;
  }> = query.all() as any;
  for (const r of res) {
    console.log('adding word', r);
    // if (r.word === 'abrasive') throw new Error('stop right here');
    const split = r.word.split(' ');
    const type = split.length > 1 ? 'expression' : 'word';
    const subtype = null;
    const wordid = addWord(db, r.word, r.pronunciation, 'en-us', type, subtype);
    addCat(db, wordid, r.domainname);
  }
}

// function saveLine(line: string) {
//   const [spelling, ipa] = line.split(/\s+/);
//   if (!spelling || !ipa) return;
//   const hasSymbols = spelling.match(SYMBOL_REGEX);
//   if (hasSymbols) return;
//   const isWord = checkWordNet(spelling);
//   console.log(spelling, isWord);
//   if (!isWord) return;
//   const split = spelling.split(' ');
//   const type = split.length > 1 ? 'expression' : 'word';
//   const subtype = null;
//   addWord(db, spelling, ipa, 'en-us', type, subtype);
// }

// function checkWordNet(word: string) {
//   const query = wndb.query(`SELECT * FROM words WHERE word = $word`);
//   const res = query.get({ $word: word });
//   return !!res;
// }

function englishCards() {
  const lesson_id = addLesson(db, 'First Lesson, some easy stuff');
  const texts = [
    'I',
    'friend',
    'my friend',
    'you',
    'your friend',
    "my friends' friend",
    'you are my friend',
    'I am your friend',
    'your friend is my friend',
    'my friend is your friend',
    'he is my friend',
    'this is mine',
    'this is yours',
    "this is my friends'",
    'no',
    'you are not my friend',
    'this is not yours',
    'your friend is not my friend',
    'that is mine',
    'this is mine, that is yours',
    'he is not your friend',
    'no, I am not',
    'that is not me',
    "that is not mine, that is my friends'",
  ];
  for (const text of texts) {
    addCard(db, lesson_id, text);
  }
}

englishIPA();
englishFreq();
englishCards();
138  server/server.ts
@@ -1,35 +1,103 @@
import { Database } from 'bun:sqlite';
import {
  addUser,
  fetchCard,
  fetchLesson,
  fetchLessons,
  fetchResource,
} from './db';

const db = new Database('../data/data.db');
const db = new Database('../db/data.db', { strict: true });
db.exec('PRAGMA journal_mode = WAL;');

Bun.serve({
  fetch(req) {
    console.log('req object', req);
    const url = new URL(req.url);
    if (url.pathname === '/') return new Response('Home page!');
    console.log(url.pathname, 'url');
    const user = parseUser(req);
    if (req.method === 'POST' && url.pathname === '/api')
      return handlePost(req, url);
      return handlePost(req, user, url);
    if (req.method === 'GET' && url.pathname.startsWith('/api'))
      return handleGet(req, url);
    return new Response('404!');
      return handleGet(req, user, url);
    return serveStatic(url);
  },
});

async function handleGet(_req: Request, url: URL) {
  if (url.pathname === '/api/resource') return handleGetResource(url);
  else return new Response('huh');
}
function handleGetResource(url: URL) {
  const params = new URLSearchParams(url.search);
  console.log(params);
  const type = params.get('type');
  const resource = params.get('res');
  const data = fetchResource(type!, resource!);
  return Response.json({ ok: true, data });
function parseUser(req: Request): number {
  // console.log(req, 'request');
  return 0;
}

async function handlePost(req: Request, url: URL) {
async function serveStatic(url: URL) {
  const filename = url.pathname === '/' ? '/index.html' : url.pathname;
  const headers = { 'Content-type': 'text/html' };
  const opts = { headers };
  try {
    const file = await Bun.file(`../ui/${filename}`).bytes();
    return new Response(file, opts);
  } catch (_) {
    return new Response('404!');
  }
}

async function handleGet(_req: Request, user: number, url: URL) {
  if (url.pathname === '/api/resource') return handleGetExpresion(user, url);
  if (url.pathname === '/api/card') return handleGetCard(user, url);
  if (url.pathname === '/api/lesson') return handleGetLesson(user, url);
  if (url.pathname === '/api/lessons') return handleGetLessons(user, url);
  else return new Response('huh');
}
function handleGetExpresion(user: number, url: URL) {
  const params = new URLSearchParams(url.search);
  const expression = params.get('exp');
  const data = fetchResource(db, expression!);
  return Response.json({ ok: data });
}
type LessonsType = Record<
  number,
  {
    id: number;
    text: string;
    cards: Array<{ text: string; note: string | null; id: number }>;
  }
>;
type LessonsDBType = {
  id: number;
  ltext: string;
  ctext: string;
  cnote: string | null;
  cid: number;
};
function handleGetLesson(user: number, url: URL) {
  const params = new URLSearchParams(url.search);
  const lesson = params.get('lesson');
  const data = fetchLesson(db, Number(lesson!));
  return Response.json({ ok: data });
}
function handleGetCard(user: number, url: URL) {
  const params = new URLSearchParams(url.search);
  const card = params.get('card');
  const data = fetchCard(db, Number(card), user);
  return Response.json({ ok: data });
}
function handleGetLessons(user: number, url: URL) {
  const params = new URLSearchParams(url.search);
  const page = params.get('page') || '0';
  const data: LessonsDBType[] = fetchLessons(db, 20, Number(page)) as any;
  const lessons = data.reduce((acc: LessonsType, item: LessonsDBType) => {
    let cur = acc[item.id] || { id: item.id, text: item.ltext, cards: [] };
    const cards = [
      ...cur.cards,
      { text: item.ctext, note: item.cnote, id: item.cid },
    ];
    const def = { ...cur, cards };
    return { ...acc, [item.id]: def };
  }, {} as LessonsType);
  console.log(lessons, 'lesons');
  return Response.json({ ok: lessons });
}

async function handlePost(req: Request, user: number, url: URL) {
  const data = await req.json();
  if (url.pathname === '/api/user') return handlePostUser(data);
  else return new Response('huh');
@@ -37,38 +105,6 @@ async function handlePost(req: Request, url: URL) {
// https://bun.sh/guides/http/server
type PostUser = { name: string; creds: string };
function handlePostUser(user: PostUser) {
  addUser(user.name, user.creds);
  addUser(db, user.name, user.creds);
  return new Response('ok');
}

function addUser(name: string, creds: string) {
  const query = db.query(`
    INSERT
    INTO users(name, creds)
    VALUES($name, $creds)
  `);
  query.run({ $name: name, $creds: creds });
}

function fetchResource(type: string, resource: string) {
  if (type === 'word') return fetchWord(resource);
  else return {};
}
function fetchWord(word: string) {
  const query = db.query(`
    SELECT
      spelling,
      ipa,
      languages.name AS language,
      GROUP_CONCAT(c.name, ',') AS category,
      ps.name AS pos
    FROM words
    JOIN languages ON languages.id = words.language_id
    JOIN word_categories wc ON words.id = wc.word_id
    JOIN categories c ON wc.category_id = c.id
    JOIN parts_of_speech ps ON ps.id = c.part_of_speech_id
    WHERE spelling = $spelling
    GROUP BY words.id
  `);
  return query.get({ $spelling: word });
}
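handleGetLessons above takes the flat lesson/card rows that fetchLessons returns and groups them by lesson id before responding. A small sketch of that reshaping with made-up row values (the field names match LessonsDBType in server.ts):

  const rows = [
    { id: 1, ltext: 'First Lesson', ctext: 'I', cnote: null, cid: 10 },
    { id: 1, ltext: 'First Lesson', ctext: 'friend', cnote: null, cid: 11 },
  ];
  // After the reduce, the JSON body sent to the client looks like:
  // { ok: { '1': { id: 1, text: 'First Lesson',
  //                cards: [ { text: 'I', note: null, id: 10 },
  //                         { text: 'friend', note: null, id: 11 } ] } } }

This nested shape keyed by lesson id is what lessonsDecoder in ui/src/Api.elm reads from the "ok" field.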
11  server/utils.ts (Normal file)
@@ -0,0 +1,11 @@
export function wordFactorial(words: string[]): string[] {
  const combinations: string[] = [];
  for (let i = 0; i < words.length; i++) {
    let inner = '';
    for (let ii = i; ii < words.length; ii++) {
      inner += (ii > i ? ' ' : '') + words[ii];
      combinations.push(inner);
    }
  }
  return combinations;
}
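Despite the name, wordFactorial does not compute factorials; it returns every contiguous sub-phrase of the input, which addCard in server/db.ts then matches against existing expressions. A quick sketch with a made-up phrase:

  import { wordFactorial } from './utils';

  wordFactorial(['my', 'best', 'friend']);
  // => [ 'my', 'my best', 'my best friend', 'best', 'best friend', 'friend' ]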
BIN  ui/elm-stuff/0.19.1/Api.elmi (Normal file, binary file not shown)
BIN  ui/elm-stuff/0.19.1/Api.elmo (Normal file, binary file not shown)
BIN  ui/elm-stuff/0.19.1/Main.elmi (Normal file, binary file not shown)
BIN  ui/elm-stuff/0.19.1/Main.elmo (Normal file, binary file not shown)
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
{"direct":{"elm/browser":"1.0.2","elm/core":"1.0.5","elm/html":"1.0.0","elm/http":"2.0.0","elm/json":"1.1.3","elm/random":"1.0.0","elm/time":"1.0.0","mdgriffith/elm-ui":"1.1.8"},"indirect":{"elm/bytes":"1.0.8","elm/file":"1.0.5","elm/url":"1.0.0","elm/virtual-dom":"1.0.3"}}
@@ -0,0 +1 @@
{"direct":{"elm/browser":"1.0.2","elm/core":"1.0.5","elm/html":"1.0.0","elm/http":"2.0.0","elm/json":"1.1.3","elm/random":"1.0.0","elm/time":"1.0.0","mdgriffith/elm-ui":"1.1.8"},"indirect":{"elm/bytes":"1.0.8","elm/file":"1.0.5","elm/url":"1.0.0","elm/virtual-dom":"1.0.3"}}
@@ -0,0 +1 @@
{"direct":{"elm/browser":"1.0.2","elm/core":"1.0.5","elm/html":"1.0.0","elm/json":"1.1.3","elm/random":"1.0.0","elm/time":"1.0.0","mdgriffith/elm-ui":"1.1.8"},"indirect":{"elm/url":"1.0.0","elm/virtual-dom":"1.0.3"}}
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,31 @@
{
    "type": "application",
    "source-directories": [
        "src",
        "../../../../../../../../../../../nix/store/7c4670wjl90sfwmvd34yz4wi9g2wccgx-elm-test-0.19.1-revision12/lib/node_modules/elm-test/elm/src",
        "../../../../../src"
    ],
    "elm-version": "0.19.1",
    "dependencies": {
        "direct": {
            "elm/browser": "1.0.2",
            "elm/core": "1.0.5",
            "elm/html": "1.0.0",
            "elm/http": "2.0.0",
            "elm/json": "1.1.3",
            "elm/random": "1.0.0",
            "elm/time": "1.0.0",
            "mdgriffith/elm-ui": "1.1.8"
        },
        "indirect": {
            "elm/bytes": "1.0.8",
            "elm/file": "1.0.5",
            "elm/url": "1.0.0",
            "elm/virtual-dom": "1.0.3"
        }
    },
    "test-dependencies": {
        "direct": {},
        "indirect": {}
    }
}
@@ -9,10 +9,13 @@
        "elm/browser": "1.0.2",
        "elm/core": "1.0.5",
        "elm/html": "1.0.0",
        "elm/http": "2.0.0",
        "elm/json": "1.1.3",
        "mdgriffith/elm-ui": "1.1.8"
    },
    "indirect": {
        "elm/json": "1.1.3",
        "elm/bytes": "1.0.8",
        "elm/file": "1.0.5",
        "elm/time": "1.0.0",
        "elm/url": "1.0.0",
        "elm/virtual-dom": "1.0.3"
@@ -0,0 +1,24 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="UTF-8">
  <title>Prosody</title>
</head>
<body>
  <div id="root"></div>
  <script src="elm.js"></script>
  <script>
    const app = Elm.Main.init({
      node: document.getElementById('root')
    });
  </script>
</body>
</html>
SELECT
  l.id, l.text, cards.text, cards.note
FROM cards_lessons cl
JOIN lessons l ON l.id = cl.lesson_id
JOIN cards ON cards.id = lc.card_id
LIMIT 20
OFFSET 0
BY l.id ASC
113  ui/src/Api.elm (Normal file)
@@ -0,0 +1,113 @@
module Api exposing (Card, Lesson, Lessons, ServerResponse(..), fetchLessons)

import Dict exposing (Dict)
import Http
import Json.Decode as Decode



-- data types


type alias Card =
    { text : String
    , note : Maybe String
    , id : Int
    }


type alias Lesson =
    { text : String
    , id : Int
    , cards : List Card
    }


type alias Lessons =
    Dict Int Lesson


type ServerResponse
    = OkResponse Lessons
    | ErrorResponse String



-- json decoders


serverResponseDecoder : Decode.Decoder ServerResponse
serverResponseDecoder =
    Decode.oneOf
        [ Decode.map OkResponse
            (Decode.field "ok" lessonsDecoder)
        , Decode.map ErrorResponse (Decode.field "error" Decode.string)
        ]


cardDecoder : Decode.Decoder Card
cardDecoder =
    Decode.map3 Card
        (Decode.field "text" Decode.string)
        (Decode.field "note" (Decode.maybe Decode.string))
        (Decode.field "id" Decode.int)


lessonDecoder : Decode.Decoder Lesson
lessonDecoder =
    Decode.map3 Lesson
        (Decode.field "text" Decode.string)
        (Decode.field "id" Decode.int)
        (Decode.field "cards" (Decode.list cardDecoder))


lessonsDecoder : Decode.Decoder Lessons
lessonsDecoder =
    Decode.dict lessonDecoder
        |> Decode.andThen convertKeysToIntDict


convertKeysToIntDict : Dict String Lesson -> Decode.Decoder Lessons
convertKeysToIntDict stringKeyedDict =
    stringKeyedDict
        |> Dict.toList
        |> List.map (\( key, lesson ) -> ( String.toInt key, lesson ))
        |> List.foldl
            (\maybePair acc ->
                case maybePair of
                    ( Just intKey, lesson ) ->
                        Dict.insert intKey lesson acc

                    ( Nothing, _ ) ->
                        acc
            )
            Dict.empty
        |> Decode.succeed



-- http command


fetchLessons : (Result Http.Error ServerResponse -> msg) -> Cmd msg
fetchLessons toMsg =
    Http.get
        { url = "http://localhost:3000/api/lessons"
        , expect = Http.expectJson toMsg serverResponseDecoder
        }



-- FetchRawJsonHandler (Ok rawJson) ->
--     Debug.log "Raw json response" rawJson
--         |> (\_ ->
--                 case Decode.decodeString lessonsDecoder rawJson of
--                     Ok lessons ->
--                         ( { model | lessons = lessons, isLoading = False }, Cmd.none )
--                     Err decodeError ->
--                         Debug.log "Decode Error" decodeError
--                             |> (\_ -> ( { model | isLoading = False }, Cmd.none ))
--            )
-- FetchRawJsonHandler (Err httpError) ->
--     Debug.log "HTTP Error" httpError
--         |> (\_ -> ( { model | isLoading = False }, Cmd.none ))
194  ui/src/Main.elm
@@ -1,24 +1,190 @@
module Main exposing (..)

import Api exposing (Card, Lesson, Lessons, ServerResponse(..), fetchLessons)
import Browser
import Html exposing (Html, button, div, text)
import Html.Events exposing (onClick)
import Dict exposing (Dict)
import Element exposing (..)
import Element.Background as Background
import Element.Border as Border
import Element.Events exposing (onClick)
import Element.Font as Font
import Html exposing (Html)
import Http

main =
    Browser.sandbox { init = 0, update = update, view = view }

-- user actions
type Msg = Increment | Decrement
type Tab
    = Lessons
    | Words
    | Pronunciation


type Msg
    = UserSelectedTab Tab
    | FetchDataHandler (Result Http.Error ServerResponse)



-- state


type alias Model =
    { lessons : Lessons
    , tab : Tab
    , isLoading : Bool
    }


sampleLessons : Lessons
sampleLessons =
    Dict.fromList []


initialState : Model
initialState =
    { isLoading = False
    , lessons = sampleLessons
    , tab = Lessons
    }



-- update


update : Msg -> Model -> ( Model, Cmd Msg )
update msg model =
    case msg of
        Increment ->
            model + 1
        UserSelectedTab t ->
            ( { model | tab = t }, Cmd.none )

        Decrement ->
            model - 1
        FetchDataHandler (Ok serres) ->
            -- let
            --     _ =
            --         Debug.log "hi" serres
            -- in
            case serres of
                OkResponse lessons ->
                    ( { model | lessons = lessons, isLoading = False }, Cmd.none )

                ErrorResponse _ ->
                    ( { model | isLoading = False }, Cmd.none )

        FetchDataHandler (Err _) ->
            ( { model | isLoading = False }, Cmd.none )


view : Model -> Html Msg
view model =
    div []
        [ button [ onClick Decrement ] [ text "-" ]
        , div [] [ text (String.fromInt model) ]
        , button [ onClick Increment ] [ text "+" ]
    if model.isLoading then
        layout [] (text "...")

    else
        layout [ width fill, height fill ] <|
            column
                [ centerX ]
                [ row []
                    [ tabEl Lessons model.tab
                    , tabEl Words model.tab
                    , tabEl Pronunciation model.tab
                    ]
                , if model.tab == Lessons then
                    lessonsView model.lessons

                  else
                    el [] (text "WIP")
                ]


tabEl : Tab -> Tab -> Element Msg
tabEl tab selectedTab =
    let
        isSelected =
            tab == selectedTab

        padOffset =
            if isSelected then
                0

            else
                2

        borderWidths =
            if isSelected then
                { left = 2, top = 2, right = 2, bottom = 0 }

            else
                { bottom = 2, top = 0, left = 0, right = 0 }

        corners =
            if isSelected then
                { topLeft = 6, topRight = 6, bottomLeft = 0, bottomRight = 0 }

            else
                { topLeft = 0, topRight = 0, bottomLeft = 0, bottomRight = 0 }
    in
    el
        [ Border.widthEach borderWidths
        , Border.roundEach corners
        , Border.color color.blue
        , onClick <| UserSelectedTab tab
        ]
    <|
        el
            [ centerX
            , centerY
            , paddingEach { left = 30, right = 30, top = 10 + padOffset, bottom = 10 - padOffset }
            ]
        <|
            text <|
                case tab of
                    Lessons ->
                        "Lessons"

                    Words ->
                        "Words"

                    Pronunciation ->
                        "Audio"


lessonsView : Lessons -> Element Msg
lessonsView lessons =
    Dict.values lessons
        |> List.map lessonPreview
        |> column []


lessonPreview : Lesson -> Element Msg
lessonPreview lesson =
    el []
        (column []
            [ text ("Lesson: " ++ String.fromInt lesson.id)
            , text lesson.text
            ]
        )


init : flags -> ( Model, Cmd Msg )
init flags =
    Debug.log "Init flags" flags
        |> (\_ -> Debug.log "Initial State" initialState)
        |> (\_ -> ( initialState, fetchLessons FetchDataHandler ))


main : Program () Model Msg
main =
    Browser.element
        { init = init
        , view = view
        , update = update
        , subscriptions = \_ -> Sub.none
        }


color =
    { blue = rgb255 0x72 0x9F 0xCF
    , darkCharcoal = rgb255 0x2E 0x34 0x36
    , lightBlue = rgb255 0xC5 0xE8 0xF7
    , lightGrey = rgb255 0xE0 0xE0 0xE0
    , white = rgb255 0xFF 0xFF 0xFF
    }