Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Global Transforms #34

Merged
merged 2 commits into from
Oct 15, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .github/workflows/rust.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,4 +49,7 @@ jobs:
- name: Ingest
run: cargo run -- ingest -d examples/ingest_from_json/empty_language -o generate=word -o language="example" json examples/ingest_from_json/input.json
- name: Post-ingest render
run: cargo run -- render -d examples/ingest_from_json/empty_language line
run: cargo run -- render -d examples/ingest_from_json/empty_language line

- name: Globals
run: cargo run -- render -d examples/global_transforms line
20 changes: 20 additions & 0 deletions examples/global_transforms/etymology/ety.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
{
"transforms": {
"of-from-latin": {
"transforms": [
{
"match_replace": {
"old": "l",
"new": "t"
}
},
{
"match_replace": {
"old": "a",
"new": "aa"
}
}
]
}
}
}
31 changes: 31 additions & 0 deletions examples/global_transforms/globals.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
{
"transforms": [
{
"transforms": [
{
"match_replace": {
"old": "au",
"new": "ū"
}
}
],
"conditional": {
"etymon": {
"language": {
"match": {
"equals": "Old Exemplum"
}
}
},
"lexis": {
"language": {
"match": {
"equals": "New Exemplum"
}
}
}
}
}
]
}

23 changes: 23 additions & 0 deletions examples/global_transforms/phonetics/rules.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
{
"groups": {
"C": [
"x",
"m",
"p",
"l"
],
"V": [
"e",
"a"
],
"S": [
"VC",
"CCV"
]
},
"lexis_types": {
"word": [
"SSS"
]
}
}
9 changes: 9 additions & 0 deletions examples/global_transforms/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Using Global Transforms

In addition to specifying transforms in word etymology, transforms can be specified at a global level, where they will be applied to all words that meet the given match statements.

Unlike an etymon-level transform, a global transform can take a match statement for the `lexis` — the word targeted by the transform — as well as for `etymon`, which matches against the first upstream etymon of the given lexis.

After looking at the example in `globals.json`, you can run the example file with

`kirum render -d ./ line`
39 changes: 39 additions & 0 deletions examples/global_transforms/tree/global_transforms.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"words": {
"latin_example": {
"type": "word",
"language": "New Exemplum",
"definition": "an instance, model, example",
"part_of_speech": "noun",
"etymology": {
"etymons": [
{
"etymon": "old_word"
}
]
},
"archaic": true,
"derivatives": [
{
"lexis": {
"language": "New Exemplum",
"definition": "model, example",
"part_of_speech": "noun",
"archaic": true
},
"transforms": [
"of-from-latin"
]
}
]
},
"old_word": {
"word": "aushal",
"type": "word",
"language": "Old Exemplum",
"definition": "To buy, remove",
"part_of_speech": "verb",
"archaic": true
}
}
}
1 change: 0 additions & 1 deletion kirum/src/entries.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ pub struct RawTransform{
pub conditional: Option<LexisMatch>
}


impl From<RawTransform> for Transform{
fn from(value: RawTransform) -> Self {
Transform { name: String::new(), lex_match: value.conditional, transforms: value.transforms}
Expand Down
32 changes: 26 additions & 6 deletions kirum/src/files.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
use std::{path::{PathBuf, Path}, collections::HashMap, fs::File, io::Write};
use anyhow::{Result, Context, anyhow};
use libkirum::{kirum::{LanguageTree, Lexis}, transforms::{Transform, TransformFunc}, word::{Etymology, Edge}, lexcreate::LexPhonology};
use libkirum::{kirum::{LanguageTree, Lexis}, transforms::{Transform, TransformFunc, GlobalTransform}, word::{Etymology, Edge}, lexcreate::LexPhonology};
use serde::Serialize;
use walkdir::{WalkDir, DirEntry};
use crate::entries::{RawTransform, RawLexicalEntry, TransformGraph, WordGraph};
use crate::{entries::{RawTransform, RawLexicalEntry, TransformGraph, WordGraph}, global::Global};
use handlebars::Handlebars;

/// contains path data for everything needed for a project
pub struct Project {
pub graphs: Vec<PathBuf>,
pub transforms: Vec<PathBuf>,
pub phonetic_rules: Option<Vec<PathBuf>>
pub phonetic_rules: Option<Vec<PathBuf>>,
pub globals: Option<PathBuf>
}

/// renders any templating code that was written into word definitions
Expand Down Expand Up @@ -49,14 +50,25 @@ pub fn read_from_files(proj: Project) -> Result<LanguageTree>{
tree.word_creator_phonology = create_phonetics(phonetic_files)?;
}



for (lex_name, node) in &language_map{
debug!("creating node entry {}", lex_name);
let node_lex: Lexis = Lexis { id: lex_name.to_string(), ..node.clone().into() };
add_single_word(&mut tree, &transform_map, &language_map, &node_lex, &node.etymology)?;
}

if let Some(globals) = proj.globals {
let raw = std::fs::read_to_string(globals)?;
let global_trans: Global = serde_json::from_str(&raw)?;
if let Some(raw_trans) = global_trans.transforms {
let mut final_trans: Vec<GlobalTransform> = Vec::new();
for trans in raw_trans {
final_trans.push(trans.into())
}
tree.global_transforms = Some(final_trans);
}

}

Ok(tree)
}

Expand Down Expand Up @@ -164,6 +176,7 @@ pub fn handle_directory(path: &str) -> Result<Project> {
let lang_graph_dir = lang_dir.join("tree");
let lang_transform_dir = lang_dir.join("etymology");
let phonetics_path = lang_dir.join("phonetics");
let globals_file = lang_dir.join("globals.json");

debug!("using tree path: {}", lang_graph_dir.display());
let graphs: Vec<PathBuf> = read_subdir_create_list(lang_graph_dir)?;
Expand All @@ -178,11 +191,18 @@ pub fn handle_directory(path: &str) -> Result<Project> {
} else {
None
};

let global_trans: Option<PathBuf> = if globals_file.exists() {
Some(globals_file)
} else {
None
};


Ok(Project { graphs,
transforms,
phonetic_rules})
phonetic_rules,
globals: global_trans})
}

fn read_subdir_create_list(path: PathBuf) -> Result<Vec<PathBuf>>{
Expand Down
38 changes: 38 additions & 0 deletions kirum/src/global.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
use libkirum::{transforms::{TransformFunc, GlobalTransform}, matching::LexisMatch};
use serde::{Serialize, Deserialize};
use serde_with::skip_serializing_none;


#[skip_serializing_none]
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
/// Defines the contents of the globals.json file at the root of a project
/// directory. Deserialized once at load time and converted into the
/// library-level transform types.
pub struct Global {
    /// Transforms applied globally to every word that satisfies the
    /// transform's match conditions; `None` when the project defines none.
    pub transforms: Option<Vec<RawGlobalTransform>>
}


#[skip_serializing_none]
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
/// The on-disk (serialized) form of a single global transform.
/// Converted into `libkirum::transforms::GlobalTransform` via the
/// `From` impl below.
pub struct RawGlobalTransform {
    /// The transform operations to apply when the conditional matches.
    pub transforms: Vec<TransformFunc>,
    /// Match conditions selecting which words the transforms apply to.
    pub conditional: GlobalConditionals
}

#[skip_serializing_none]
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
/// Match conditions for a global transform. Unlike an etymon-level
/// transform, a global transform may match on both the target word and
/// its ancestry.
pub struct GlobalConditionals {
    /// Optional match against the first upstream etymon of the lexis.
    pub etymon: Option<LexisMatch>,
    /// Match against the lexis itself — the word targeted by the transform.
    pub lexis: LexisMatch
}


impl From<RawGlobalTransform> for GlobalTransform {
fn from(value: RawGlobalTransform) -> Self {
GlobalTransform {
lex_match: value.conditional.lexis,
etymon_match: value.conditional.etymon,
transforms: value.transforms
}
}
}
1 change: 1 addition & 0 deletions kirum/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ mod new;
mod generate;
mod ingest;
mod import;
mod global;

use clap::Parser;
use entries::create_json_graph;
Expand Down
10 changes: 7 additions & 3 deletions kirum/src/new.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::{path::PathBuf, io::Write, collections::HashMap, fs::{self, File}};
use libkirum::{transforms::TransformFunc, word::{Etymology, Edge}, lexcreate::LexPhonology};
use crate::entries::{RawTransform, TransformGraph, RawLexicalEntry, Derivative, WordGraph};
use crate::{entries::{RawTransform, TransformGraph, RawLexicalEntry, Derivative, WordGraph}, global::Global};
use anyhow::{Result, Context, anyhow};

pub fn create_project_directory(name: &str) -> Result<()>{
Expand Down Expand Up @@ -104,7 +104,11 @@ pub fn create_new_project(name: &str) -> Result<()> {
write_json("ety", &mut ety_path, trans_data).context("error writing ety file")?;
write_json("rules", &mut phonetic_path, phonetic_data).context("error writing rules file")?;


let base_globals = Global{transforms: None};
let globals_data = serde_json::to_string_pretty(&base_globals)?;
let mut globals_file = File::create(base.join("globals.json")).context("could not create globals file")?;
write!(globals_file, "{}", globals_data).context("error writing globals file")?;

Ok(())
}

Expand All @@ -115,7 +119,7 @@ fn write_json(subpath: &str, base_path: &mut PathBuf, data: String) -> Result<()
.context(format!("could not create json file {} {}", subpath, base_path.display()))?;

write!(phonetics_file, "{}", data)
.context("error writing phonetics file".to_string())?;
.context("error writing phonetics file")?;

Ok(())
}
Expand Down
Loading