initial commit
This commit is contained in:
commit
abac5189a3
8
.gitignore
vendored
Normal file
8
.gitignore
vendored
Normal file
|
@ -0,0 +1,8 @@
|
|||
.#*
|
||||
/build
|
||||
/target
|
||||
result
|
||||
result-*
|
||||
zig-*
|
||||
perf.data*
|
||||
flamegraph.svg
|
206
Cargo.lock
generated
Normal file
206
Cargo.lock
generated
Normal file
|
@ -0,0 +1,206 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
|
||||
|
||||
[[package]]
|
||||
name = "delegate-attr"
|
||||
version = "0.2.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee7e7ea0dba407429d816e8e38dda1a467cd74737722f2ccc8eae60429a1a3ab"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.148"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b"
|
||||
|
||||
[[package]]
|
||||
name = "memmap2"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f49388d20533534cd19360ad3d6a7dadc885944aa802ba3995040c5ec11288c6"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "miette"
|
||||
version = "5.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e"
|
||||
dependencies = [
|
||||
"miette-derive",
|
||||
"once_cell",
|
||||
"thiserror",
|
||||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "miette-derive"
|
||||
version = "5.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.37",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.18.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.67"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.33"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "readfilez"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94b9901cccc46094c4fd92a4a1185c3756278f4f16fe22cc50a115f4478e49a5"
|
||||
dependencies = [
|
||||
"delegate-attr",
|
||||
"memmap2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.109"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.37"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.48"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.48"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.37",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tinyvec"
|
||||
version = "1.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
|
||||
dependencies = [
|
||||
"tinyvec_macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tinyvec_macros"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
version = "0.1.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
|
||||
dependencies = [
|
||||
"tinyvec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
|
||||
|
||||
[[package]]
|
||||
name = "yn-functor"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"yn-functor-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yn-functor-derive"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yn-qgy4hbz-core"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"miette",
|
||||
"readfilez",
|
||||
"thiserror",
|
||||
"unicode-ident",
|
||||
"unicode-normalization",
|
||||
"yn-functor",
|
||||
"yz-string-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yz-string-utils"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c5df07aec85c71914730e9bd51ad5be56054e1bea77999b523b74f585d641862"
|
8
Cargo.toml
Normal file
8
Cargo.toml
Normal file
|
@ -0,0 +1,8 @@
|
|||
[workspace]
|
||||
members = ["crates/*"]
|
||||
resolver = "2"
|
||||
|
||||
[profile.release]
|
||||
codegen-units = 3
|
||||
debug = 1
|
||||
lto = "thin"
|
201
LICENSES/Apache-2.0
Normal file
201
LICENSES/Apache-2.0
Normal file
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
5
LICENSES/HEADER
Normal file
5
LICENSES/HEADER
Normal file
|
@ -0,0 +1,5 @@
|
|||
/*
|
||||
* SPDX-FileCopyrightText: 2023 Alain Zscheile <fogti@ytrizja.de>
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
15
crates/yn-functor-derive/Cargo.toml
Normal file
15
crates/yn-functor-derive/Cargo.toml
Normal file
|
@ -0,0 +1,15 @@
|
|||
[package]
|
||||
name = "yn-functor-derive"
|
||||
version = "0.1.0"
|
||||
authors = ["Bodil Stokke <bodil@bodil.org>", "Alain Zscheile <fogti+devel@ytrizja.de>"]
|
||||
license = "MPL-2.0+"
|
||||
edition = "2021"
|
||||
rust-version = "1.65"
|
||||
|
||||
[lib]
|
||||
proc_macro = true
|
||||
|
||||
[dependencies]
|
||||
syn = { version = "1.0.107", features = ["derive", "extra-traits"] }
|
||||
quote = "1.0.23"
|
||||
proc-macro2 = "1.0.50"
|
338
crates/yn-functor-derive/src/lib.rs
Normal file
338
crates/yn-functor-derive/src/lib.rs
Normal file
|
@ -0,0 +1,338 @@
|
|||
#![recursion_limit = "256"]
|
||||
|
||||
extern crate proc_macro;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{quote, quote_spanned};
|
||||
use syn::{
|
||||
parse_macro_input, punctuated::Punctuated, spanned::Spanned, token::Comma, Data, DataEnum,
|
||||
DeriveInput, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Ident, Index, Type,
|
||||
TypeParam,
|
||||
};
|
||||
|
||||
fn type_params_replace(
|
||||
input_params: &Punctuated<GenericParam, Comma>,
|
||||
replace: &TypeParam,
|
||||
with: Ident,
|
||||
) -> Punctuated<GenericParam, Comma> {
|
||||
let mut output = input_params.clone();
|
||||
for param in output.iter_mut() {
|
||||
match param {
|
||||
GenericParam::Type(ref mut type_param) if type_param == replace => {
|
||||
type_param.ident = with;
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
fn report_error(span: Span, msg: &str) -> proc_macro::TokenStream {
|
||||
(quote_spanned! {span => compile_error! {#msg}}).into()
|
||||
}
|
||||
|
||||
fn decide_functor_generic_type(input: &DeriveInput) -> Result<&TypeParam, proc_macro::TokenStream> {
|
||||
let mut generics_iter = input.generics.type_params();
|
||||
let generic_type = match generics_iter.next() {
|
||||
Some(t) => t,
|
||||
None => {
|
||||
return Err(report_error(
|
||||
input.ident.span(),
|
||||
"can't derive Functor for a type without type parameters",
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(next_type_param) = generics_iter.next() {
|
||||
return Err(report_error(
|
||||
next_type_param.span(),
|
||||
"can't derive Functor for a type with multiple type parameters; did you mean Bifunctor?",
|
||||
));
|
||||
}
|
||||
|
||||
Ok(generic_type)
|
||||
}
|
||||
|
||||
#[proc_macro_derive(Functor)]
|
||||
pub fn derive_functor(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
let name = &input.ident;
|
||||
let type_params = &input.generics.params;
|
||||
let where_clause = input.generics.where_clause.as_ref().map(|c| &c.predicates);
|
||||
|
||||
let generic_type = match decide_functor_generic_type(&input) {
|
||||
Ok(t) => t,
|
||||
Err(err) => return err,
|
||||
};
|
||||
|
||||
let type_map = HashMap::from([(
|
||||
generic_type.ident.clone(),
|
||||
Ident::new("f", Span::call_site()),
|
||||
)]);
|
||||
|
||||
let fmap_impl = match &input.data {
|
||||
Data::Struct(data) => match &data.fields {
|
||||
Fields::Named(fields) => derive_functor_named_struct(name, fields, &type_map, false),
|
||||
Fields::Unnamed(fields) => {
|
||||
derive_functor_unnamed_struct(name, fields, &type_map, false)
|
||||
}
|
||||
Fields::Unit => {
|
||||
return report_error(
|
||||
input.ident.span(),
|
||||
"can't derive Functor for an empty struct",
|
||||
);
|
||||
}
|
||||
},
|
||||
Data::Enum(data) => derive_functor_enum(name, data, &type_map, false),
|
||||
Data::Union(_) => {
|
||||
return report_error(input.ident.span(), "can't derive Functor for a union type");
|
||||
}
|
||||
};
|
||||
|
||||
let type_params_with_t = type_params_replace(
|
||||
type_params,
|
||||
generic_type,
|
||||
Ident::new("DerivedTargetType", Span::call_site()),
|
||||
);
|
||||
|
||||
quote!(
|
||||
impl<'derivedlifetime, #type_params> ::yn_functor::Functor<'derivedlifetime, #generic_type> for #name<#type_params>
|
||||
where #generic_type: 'derivedlifetime, #where_clause {
|
||||
type Target<DerivedTargetType> = #name<#type_params_with_t> where DerivedTargetType: 'derivedlifetime;
|
||||
fn fmap<DerivedType, F>(self, f: F) -> Self::Target<DerivedType>
|
||||
where
|
||||
DerivedType: 'derivedlifetime,
|
||||
F: Fn(#generic_type) -> DerivedType + 'derivedlifetime
|
||||
{
|
||||
#fmap_impl
|
||||
}
|
||||
}
|
||||
)
|
||||
.into()
|
||||
}
|
||||
|
||||
#[proc_macro_derive(FunctorRef)]
|
||||
pub fn derive_functor_ref(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
let name = &input.ident;
|
||||
let type_params = &input.generics.params;
|
||||
let where_clause = input.generics.where_clause.as_ref().map(|c| &c.predicates);
|
||||
|
||||
let generic_type = match decide_functor_generic_type(&input) {
|
||||
Ok(t) => t,
|
||||
Err(err) => return err,
|
||||
};
|
||||
|
||||
let type_map = HashMap::from([(
|
||||
generic_type.ident.clone(),
|
||||
Ident::new("f", Span::call_site()),
|
||||
)]);
|
||||
|
||||
let fmapref_impl = match &input.data {
|
||||
Data::Struct(data) => match &data.fields {
|
||||
Fields::Named(fields) => derive_functor_named_struct(name, fields, &type_map, true),
|
||||
Fields::Unnamed(fields) => derive_functor_unnamed_struct(name, fields, &type_map, true),
|
||||
Fields::Unit => {
|
||||
return report_error(
|
||||
input.ident.span(),
|
||||
"can't derive FunctorRef for an empty struct",
|
||||
);
|
||||
}
|
||||
},
|
||||
Data::Enum(data) => derive_functor_enum(name, data, &type_map, true),
|
||||
Data::Union(_) => {
|
||||
return report_error(
|
||||
input.ident.span(),
|
||||
"can't derive FunctorRef for a union type",
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
quote!(
|
||||
impl<'derivedlifetime, #type_params> ::yn_functor::FunctorRef<'derivedlifetime, #generic_type> for #name<#type_params>
|
||||
where #generic_type: 'derivedlifetime, #where_clause {
|
||||
fn fmap_ref<DerivedType, F>(&self, f: F) -> Self::Target<DerivedType>
|
||||
where
|
||||
DerivedType: 'derivedlifetime,
|
||||
F: Fn(&#generic_type) -> DerivedType + 'derivedlifetime
|
||||
{
|
||||
#fmapref_impl
|
||||
}
|
||||
}
|
||||
)
|
||||
.into()
|
||||
}
|
||||
|
||||
fn match_type_param<'a>(params: &'a HashMap<Ident, Ident>, ty: &Type) -> Option<&'a Ident> {
|
||||
if let Type::Path(path) = ty {
|
||||
if let Some(segment) = path.path.segments.iter().next() {
|
||||
return params.get(&segment.ident);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn filter_fields<P, F1, F2>(
|
||||
fields: &Punctuated<Field, P>,
|
||||
ty: &HashMap<Ident, Ident>,
|
||||
transform: F1,
|
||||
copy: F2,
|
||||
) -> Vec<TokenStream>
|
||||
where
|
||||
F1: Fn(&Ident, &Ident) -> TokenStream,
|
||||
F2: Fn(&Ident) -> TokenStream,
|
||||
{
|
||||
fields
|
||||
.iter()
|
||||
.map(|field| {
|
||||
if let Some(f) = match_type_param(ty, &field.ty) {
|
||||
transform(&field.ident.clone().unwrap(), f)
|
||||
} else {
|
||||
copy(&field.ident.clone().unwrap())
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn derive_functor_named_struct(
|
||||
name: &Ident,
|
||||
fields: &FieldsNamed,
|
||||
generic_types: &HashMap<Ident, Ident>,
|
||||
as_ref: bool,
|
||||
) -> TokenStream {
|
||||
let apply_fields = filter_fields(
|
||||
&fields.named,
|
||||
generic_types,
|
||||
|field, function_name| {
|
||||
if as_ref {
|
||||
quote! {
|
||||
#field: #function_name(&self.#field),
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
#field: #function_name(self.#field),
|
||||
}
|
||||
}
|
||||
},
|
||||
|field| {
|
||||
quote! {
|
||||
#field: self.#field,
|
||||
}
|
||||
},
|
||||
)
|
||||
.into_iter();
|
||||
quote! {
|
||||
#name {
|
||||
#(#apply_fields)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn derive_functor_unnamed_struct(
|
||||
name: &Ident,
|
||||
fields: &FieldsUnnamed,
|
||||
generic_types: &HashMap<Ident, Ident>,
|
||||
as_ref: bool,
|
||||
) -> TokenStream {
|
||||
let fields = fields.unnamed.iter().enumerate().map(|(index, field)| {
|
||||
let index = Index::from(index);
|
||||
if let Some(function_name) = match_type_param(generic_types, &field.ty) {
|
||||
if as_ref {
|
||||
quote! { #function_name(&self.#index), }
|
||||
} else {
|
||||
quote! { #function_name(self.#index), }
|
||||
}
|
||||
} else {
|
||||
quote! { self.#index, }
|
||||
}
|
||||
});
|
||||
quote! { #name(#(#fields)*) }
|
||||
}
|
||||
|
||||
fn derive_functor_enum(
|
||||
name: &Ident,
|
||||
data: &DataEnum,
|
||||
generic_types: &HashMap<Ident, Ident>,
|
||||
as_ref: bool,
|
||||
) -> TokenStream {
|
||||
let variants = data.variants.iter().map(|variant| {
|
||||
let ident = &variant.ident;
|
||||
match &variant.fields {
|
||||
Fields::Named(fields) => {
|
||||
let args: Vec<Ident> = fields
|
||||
.named
|
||||
.iter()
|
||||
.map(|field| {
|
||||
Ident::new(
|
||||
&format!("arg_{}", field.ident.clone().unwrap()),
|
||||
field.ident.clone().unwrap().span(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
let apply =
|
||||
fields
|
||||
.named
|
||||
.iter()
|
||||
.zip(args.clone().into_iter())
|
||||
.map(|(field, arg)| {
|
||||
let name = &field.ident;
|
||||
if let Some(function_name) = match_type_param(generic_types, &field.ty)
|
||||
{
|
||||
if as_ref {
|
||||
quote! { #name: #function_name(&#arg) }
|
||||
} else {
|
||||
quote! { #name: #function_name(#arg) }
|
||||
}
|
||||
} else {
|
||||
quote! { #name: #arg }
|
||||
}
|
||||
});
|
||||
let args = fields
|
||||
.named
|
||||
.iter()
|
||||
.zip(args.into_iter())
|
||||
.map(|(field, arg)| {
|
||||
let name = &field.ident;
|
||||
quote! { #name:#arg }
|
||||
});
|
||||
quote! {
|
||||
#name::#ident { #(#args,)* } => #name::#ident { #(#apply,)* },
|
||||
}
|
||||
}
|
||||
Fields::Unnamed(fields) => {
|
||||
let args: Vec<Ident> = fields
|
||||
.unnamed
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, _)| Ident::new(&format!("arg{index}"), Span::call_site()))
|
||||
.collect();
|
||||
let fields = fields.unnamed.iter().zip(args.iter()).map(|(field, arg)| {
|
||||
if let Some(function_name) = match_type_param(generic_types, &field.ty) {
|
||||
if as_ref {
|
||||
quote! { #function_name(&#arg) }
|
||||
} else {
|
||||
quote! { #function_name(#arg) }
|
||||
}
|
||||
} else {
|
||||
quote! { #arg }
|
||||
}
|
||||
});
|
||||
let args = args.iter();
|
||||
quote! {
|
||||
#name::#ident(#(#args,)*) => #name::#ident(#(#fields,)*),
|
||||
}
|
||||
}
|
||||
Fields::Unit => quote! {
|
||||
#name::#ident => #name::#ident,
|
||||
},
|
||||
}
|
||||
});
|
||||
quote! {
|
||||
match self {
|
||||
#(#variants)*
|
||||
}
|
||||
}
|
||||
}
|
10
crates/yn-functor/Cargo.toml
Normal file
10
crates/yn-functor/Cargo.toml
Normal file
|
@ -0,0 +1,10 @@
|
|||
[package]
|
||||
name = "yn-functor"
|
||||
version = "0.2.0"
|
||||
edition = "2021"
|
||||
rust-version = "1.65"
|
||||
authors = ["Bodil Stokke <bodil@bodil.org>", "Alain Zscheile <fogti+devel@ytrizja.de>"]
|
||||
license = "MPL-2.0+"
|
||||
|
||||
[dependencies]
|
||||
yn-functor-derive = { path = "../yn-functor-derive" }
|
3
crates/yn-functor/README.md
Normal file
3
crates/yn-functor/README.md
Normal file
|
@ -0,0 +1,3 @@
|
|||
# yn-functor
|
||||
|
||||
This crate is basically `higher`, but only the functor parts (the rest pulled in too much junk)
|
272
crates/yn-functor/src/functor.rs
Normal file
272
crates/yn-functor/src/functor.rs
Normal file
|
@ -0,0 +1,272 @@
|
|||
use core::{cell::RefCell, mem::MaybeUninit};
|
||||
|
||||
use alloc::{
|
||||
boxed::Box,
|
||||
collections::{LinkedList, VecDeque},
|
||||
rc::Rc,
|
||||
vec::Vec,
|
||||
};
|
||||
|
||||
use crate::repeat;
|
||||
|
||||
/// A `Functor` lets you change the type parameter of a generic type.
///
/// A `Functor` defines a method `fmap` on a type `F<_>: Functor` which converts
/// an `F<A>` to `F<B>` using a function `Fn(A) -> B` applied to the `A`s inside
/// it.
///
/// You can also use this just to modify the values inside your container value
/// without changing their type, if the mapping function returns a value of the
/// same type. This is called an "endofunctor." In an ideal Rust, we would be
/// able to implement this as a special case of [`fmap`](Functor::fmap)
/// modifying the data in place, but in the Rust we have, beware that using
/// [`fmap`](Functor::fmap) in this manner is considerably less efficient than
/// using a mutable reference iterator.
pub trait Functor<'a, A: 'a> {
    /// The same container shape re-parameterised over element type `T`.
    ///
    /// The `Target<A> = Self` bound makes the mapping round-trip: the
    /// `B`-typed output of `fmap` maps back to exactly `Self`.
    type Target<T: 'a>: Functor<'a, T, Target<A> = Self>;

    /// Map a functor of `A` to a functor of `B` using a function from `A`
    /// to `B`.
    fn fmap<B: 'a, F: 'a>(self, f: F) -> Self::Target<B>
    where
        F: Fn(A) -> B;

    /// Map the functor to the provided constant value.
    fn fconst<B>(self, b: B) -> Self::Target<B>
    where
        Self: Sized,
        B: Clone,
    {
        // `repeat(b)` clones `b` once per contained element.
        self.fmap(repeat(b))
    }

    /// Map the functor to the unit value `()`.
    fn void(self) -> Self::Target<()>
    where
        Self: Sized,
    {
        self.fconst(())
    }

    /// Turn the functor into an iterator.
    ///
    /// ```
    /// # use yn_functor::Functor;
    /// let my_functor = vec![1, 2, 3];
    /// let iter = my_functor.f_into_iter();
    /// let my_vec: Vec<i32> = iter.collect();
    /// assert_eq!(my_vec, vec![1, 2, 3]);
    /// ```
    fn f_into_iter(self) -> Box<dyn Iterator<Item = A>>
    where
        Self: Sized,
        A: 'static,
    {
        // Drain the elements out via `fmap`'s side effect: the closure pushes
        // each `A` into a shared buffer and the mapped `()` result is dropped.
        let store = Rc::new(RefCell::new(Vec::new()));
        let istore = store.clone();
        self.fmap(move |a| istore.borrow_mut().push(a));
        Box::new(
            // NOTE(review): this assumes the implementation has dropped the
            // mapping closure (and with it the second `Rc` handle) by the
            // time `fmap` returns; a lazy functor that keeps the closure
            // alive would hit the `unreachable!()` — confirm for new impls.
            match Rc::try_unwrap(store) {
                Ok(store) => store,
                Err(_) => unreachable!(),
            }
            .into_inner()
            .into_iter(),
        )
    }
}
|
||||
|
||||
/// `FunctorRef` is an extension to [`Functor`](Functor) which provides a
/// non-destructive [`fmap`](Functor::fmap) passing references to the mapping
/// function.
///
/// This trait is separate from [`Functor`](Functor) because it can only be
/// implemented for types which can be reconstructed using only references and
/// the function mapping `&A` to `B`. For instance, it can't be implemented for
/// [`Result<A, E>`](Result) because in the [`Err`](Result::Err) case, we can't
/// map to another [`Err`](Result::Err) without ownership of the `E`.
pub trait FunctorRef<'a, A: 'a>: Functor<'a, A> {
    /// Map a functor of `A` to a functor of `B` using a function from `&A` to
    /// `B`.
    fn fmap_ref<B: 'a, F: 'a>(&self, f: F) -> Self::Target<B>
    where
        F: Fn(&A) -> B;

    /// Given a type `A` implementing [`Clone`](Clone), create a new identical
    /// `FunctorRef<A>` by cloning the values inside `self`.
    ///
    /// This is mostly useful for data structures which don't necessarily
    /// implement [`Clone`](Clone). For those which do, you should reimplement
    /// this method simply as a call to [`Clone::clone()`](Clone) for
    /// performance.
    fn fclone(&self) -> Self
    where
        // `Target<A> = Self` pins the round-trip: mapping `&A` back to `A`
        // really produces a `Self` again.
        Self: Sized + FunctorRef<'a, A, Target<A> = Self>,
        A: Clone,
    {
        self.fmap_ref(Clone::clone)
    }
}
|
||||
|
||||
impl<'a, A: 'a> Functor<'a, A> for Option<A> {
|
||||
type Target<T: 'a> = Option<T>;
|
||||
|
||||
fn fmap<B: 'a, F>(self, f: F) -> Self::Target<B>
|
||||
where
|
||||
F: Fn(A) -> B,
|
||||
{
|
||||
self.map(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, A: 'a> FunctorRef<'a, A> for Option<A> {
|
||||
fn fmap_ref<B: 'a, F>(&self, f: F) -> Self::Target<B>
|
||||
where
|
||||
F: Fn(&A) -> B,
|
||||
{
|
||||
self.as_ref().map(f)
|
||||
}
|
||||
|
||||
fn fclone(&self) -> Self
|
||||
where
|
||||
A: Clone,
|
||||
{
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, A: 'a, E> Functor<'a, A> for Result<A, E> {
|
||||
type Target<T: 'a> = Result<T, E>;
|
||||
|
||||
fn fmap<B, F>(self, f: F) -> Self::Target<B>
|
||||
where
|
||||
B: 'a,
|
||||
F: Fn(A) -> B,
|
||||
{
|
||||
self.map(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, A: 'a, const N: usize> Functor<'a, A> for [A; N] {
|
||||
type Target<T: 'a> = [T; N];
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
fn fmap<B, F>(self, f: F) -> Self::Target<B>
|
||||
where
|
||||
B: 'a,
|
||||
F: Fn(A) -> B + 'a,
|
||||
{
|
||||
let mut out: MaybeUninit<[B; N]> = MaybeUninit::uninit();
|
||||
let mut ptr: *mut B = out.as_mut_ptr().cast();
|
||||
for item in self.into_iter() {
|
||||
unsafe {
|
||||
ptr.write(f(item));
|
||||
ptr = ptr.add(1);
|
||||
}
|
||||
}
|
||||
unsafe { out.assume_init() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, A: 'a, const N: usize> FunctorRef<'a, A> for [A; N] {
|
||||
#[allow(unsafe_code)]
|
||||
fn fmap_ref<B, F>(&self, f: F) -> Self::Target<B>
|
||||
where
|
||||
B: 'a,
|
||||
F: Fn(&A) -> B + 'a,
|
||||
{
|
||||
let mut out: MaybeUninit<[B; N]> = MaybeUninit::uninit();
|
||||
let mut ptr: *mut B = out.as_mut_ptr().cast();
|
||||
for item in self.iter() {
|
||||
unsafe {
|
||||
ptr.write(f(item));
|
||||
ptr = ptr.add(1);
|
||||
}
|
||||
}
|
||||
unsafe { out.assume_init() }
|
||||
}
|
||||
|
||||
fn fclone(&self) -> Self
|
||||
where
|
||||
A: Clone,
|
||||
{
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
/// Implements [`Functor`] and [`FunctorRef`] for an `alloc` collection
/// `$type<A>` that supports owned/borrowed iteration and `FromIterator`.
macro_rules! impl_functor_for_collection {
    ($type:ident) => {
        impl<'a, A: 'a> Functor<'a, A> for $type<A> {
            type Target<T: 'a> = $type<T>;

            fn fmap<B, F>(self, f: F) -> Self::Target<B>
            where
                B: 'a,
                F: Fn(A) -> B,
            {
                // Consume the collection and rebuild it from mapped elements.
                self.into_iter().map(f).collect()
            }
        }

        impl<'a, A: 'a> FunctorRef<'a, A> for $type<A> {
            fn fmap_ref<B: 'a, F>(&self, f: F) -> Self::Target<B>
            where
                F: Fn(&A) -> B,
            {
                self.iter().map(f).collect()
            }

            // These collections are `Clone` when `A: Clone`, so override the
            // default with plain `Clone`, as the `fclone` docs recommend.
            fn fclone(&self) -> Self
            where
                A: Clone,
            {
                self.clone()
            }
        }
    };
}
|
||||
|
||||
// Instantiate the functor impls for the `alloc` sequence collections.
impl_functor_for_collection!(Vec);
impl_functor_for_collection!(VecDeque);
impl_functor_for_collection!(LinkedList);
|
||||
|
||||
#[cfg(test)]
mod test {
    use alloc::vec;
    use crate::Functor;

    #[test]
    fn option_functor() {
        let a = Option::Some(31337);
        let b = a.fmap(|x| x + 2);
        assert_eq!(b, Option::Some(31339));
    }

    // "endofunctor": the element type is unchanged by the mapping.
    #[test]
    fn array_endofunctor() {
        let a: [usize; 5] = [1, 2, 3, 4, 5];
        let b = a.fmap(|x| x * 2);
        assert_eq!(b, [2, 4, 6, 8, 10]);
    }

    // "exofunctor": the mapping changes the element type (u64 -> u16).
    #[test]
    fn array_exofunctor() {
        let a: [u64; 5] = [1, 2, 3, 4, 5];
        let b = a.fmap(|x| ((x * 2) as u16));
        assert_eq!(b, [2, 4, 6, 8, 10]);
    }

    #[test]
    fn vec_endofunctor() {
        let a = vec![1, 2, 3, 4, 5];
        let b = a.fmap(|x| x * 2);
        assert_eq!(b, vec![2, 4, 6, 8, 10]);
    }

    #[test]
    fn vec_exofunctor() {
        let a = vec![1, 2, 3];
        let b = a.fmap(|x| (x as usize) * 2);
        assert_eq!(b, vec![2usize, 4usize, 6usize]);
    }
}
|
33
crates/yn-functor/src/lib.rs
Normal file
33
crates/yn-functor/src/lib.rs
Normal file
|
@ -0,0 +1,33 @@
|
|||
//! Minimal functor abstractions: [`Functor`], [`FunctorRef`] and
//! [`Profunctor`], plus the matching derive macros re-exported from
//! `yn_functor_derive`.
#![deny(unsafe_code, nonstandard_style)]
#![forbid(rust_2018_idioms)]
#![warn(unreachable_pub, missing_debug_implementations)]
#![no_std]

// no_std crate: containers come from `alloc`.
extern crate alloc;

// Derive macros share names with the traits below, but live in the macro
// namespace, so the double re-export does not clash.
pub use yn_functor_derive::{Functor, FunctorRef};

pub mod functor;
#[doc(inline)]
pub use crate::functor::{Functor, FunctorRef};

pub mod profunctor;
#[doc(inline)]
pub use crate::profunctor::Profunctor;
|
||||
|
||||
/// Construct a function that ignores its argument and returns the same value
/// every time you call it.
///
/// You may know this function as `const` in certain other languages.
///
/// ```
/// # use yn_functor::repeat;
/// let f = repeat(31337);
/// assert_eq!(f("Joe"), 31337);
/// assert_eq!(f("Mike"), 31337);
/// assert_eq!(f("Robert"), 31337);
/// assert_eq!(f("Bjarne"), 31337);
/// ```
pub fn repeat<A: Clone, B>(value: A) -> impl Fn(B) -> A {
    // Each call hands out a fresh clone of the captured value.
    move |_ignored: B| value.clone()
}
|
31
crates/yn-functor/src/profunctor.rs
Normal file
31
crates/yn-functor/src/profunctor.rs
Normal file
|
@ -0,0 +1,31 @@
|
|||
use core::convert::identity;
|
||||
|
||||
/// A `Profunctor` is just a `Bifunctor` that is contravariant over its first
/// argument and covariant over its second argument. What's the problem?
pub trait Profunctor<'a, B: 'a, C: 'a> {
    /// The same shape re-parameterised over both arguments; the
    /// `Target<B, C> = Self` bound makes the mapping round-trip.
    type Target<T: 'a, U: 'a>: Profunctor<'a, T, U, Target<B, C> = Self>;

    /// Map a function over both arguments of the profunctor.
    ///
    /// `left` pre-processes the contravariant side, `right` post-processes
    /// the covariant side.
    fn dimap<A: 'a, D: 'a, L: 'a, R: 'a>(self, left: L, right: R) -> Self::Target<A, D>
    where
        L: Fn(A) -> B,
        R: Fn(C) -> D;

    /// Map a function over the contravariant first argument only.
    fn lcmap<A: 'a, L: 'a>(self, left: L) -> Self::Target<A, C>
    where
        Self: Sized,
        L: Fn(A) -> B,
    {
        // Leave the covariant side untouched.
        self.dimap(left, identity)
    }

    /// Map a function over the covariant second argument only.
    fn rmap<D: 'a, R: 'a>(self, right: R) -> Self::Target<B, D>
    where
        Self: Sized,
        R: Fn(C) -> D,
    {
        self.dimap(identity, right)
    }
}
|
17
crates/yn-qgy4hbz-core/Cargo.toml
Normal file
17
crates/yn-qgy4hbz-core/Cargo.toml
Normal file
|
@ -0,0 +1,17 @@
|
|||
[package]
|
||||
name = "yn-qgy4hbz-core"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
license = "Apache-2.0"
|
||||
|
||||
[dependencies]
|
||||
bitflags = "2.4"
|
||||
miette = "5.10"
|
||||
thiserror = "1.0"
|
||||
unicode-ident = "1.0"
|
||||
unicode-normalization = "0.1"
|
||||
yn-functor.path = "../yn-functor"
|
||||
yz-string-utils = "0.3.1"
|
||||
|
||||
[dev-dependencies]
|
||||
readfilez = "0.3.1"
|
267
crates/yn-qgy4hbz-core/src/lib.rs
Normal file
267
crates/yn-qgy4hbz-core/src/lib.rs
Normal file
|
@ -0,0 +1,267 @@
|
|||
//use bitflags::bitflags;
|
||||
pub mod parser;
|
||||
use parser::{Env as ParseEnv, Error as Perr, ErrorKind as Pek, Parse, Token, TokenKind as Tok};
|
||||
|
||||
/// A binder pattern: a name to introduce into scope, or an ignore marker.
#[derive(Clone, Debug)]
pub enum Pattern {
    /// Bind the value to this identifier (lexed from `$name`).
    Ident(Box<str>),
    /// Discard the value (lexed from `$_`).
    Ignore,
}
|
||||
|
||||
/*
|
||||
bitflags! {
|
||||
#[repr(transparent)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct DefinFlags: u8 {
|
||||
const PUBLIC = 0b00000001;
|
||||
const MUTABLE = 0b00000010;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Defin {
|
||||
pub flags: DefinFlags,
|
||||
pub value: Expr,
|
||||
pub name: Box<str>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Block {
|
||||
pub defs: Box<[Defin]>,
|
||||
pub res: Box<Expr>,
|
||||
}
|
||||
*/
|
||||
|
||||
/// The body of a record value/type: an ordered field list where each field
/// is optionally labelled (`.name = expr;`) or positional (`expr;`).
#[derive(Clone, Debug)]
pub struct Record {
    pub fields: Vec<(Option<Box<str>>, Expr)>,
}
|
||||
|
||||
/// A surface-language expression.
#[derive(Clone, Debug)]
pub enum Expr {
    // NOTE(review): not constructed by the parser visible in this file —
    // presumably the type-of-types constant; confirm semantics.
    TyTy,

    /// `λ $pat → exp`, with an optional parameter type annotation `pty`
    /// (parsed after a `:` token).
    Lambda {
        pat: Pattern,
        pty: Option<Box<Expr>>,
        exp: Box<Expr>,
    },

    /// `μ $pat → inner`: `pat` is bound inside `inner`.
    SelfRecur {
        pat: Pattern,
        inner: Box<Expr>,
    },

    /// Application of `lam` to one or more juxtaposed arguments.
    Apply {
        lam: Box<Expr>,
        args: Vec<Expr>,
    },

    /// Scope-stack reference as produced by `parser::Env::lookup`
    /// (0 = innermost binding).
    Ref(usize),

    /// `.{ ... }` — record value.
    Record(Record),
    /// `^{ ... }` — record type.
    TyRecord(Record),

    /// Chained field selection `prim.a.b...`; consecutive selections are
    /// folded into one node by `parse_minexpr`.
    Select {
        prim: Box<Expr>,
        then: Vec<Box<str>>,
    },
    //Block(Block),
}
|
||||
|
||||
impl Pattern {
    /// Runs `f` with this pattern's binding pushed onto `env.names`, then
    /// pops it again before returning `f`'s result.
    ///
    /// `Ignore` pushes an empty placeholder so that reference indices stay
    /// aligned; the lexer asserts identifiers are non-empty, so the
    /// placeholder can never be matched by `Env::lookup`.
    pub fn in_this<T>(&self, env: &mut ParseEnv<'_>, f: impl FnOnce(&mut ParseEnv<'_>) -> T) -> T {
        match self {
            Pattern::Ident(i) => env.names.push(i.clone()),
            Pattern::Ignore => env.names.push("".to_string().into_boxed_str()),
        }
        // `f` may push/pop scopes itself, but must leave the stack balanced.
        let height = env.names.len();
        let ret = f(env);
        assert_eq!(env.names.len(), height);
        env.names.pop();
        ret
    }
}
|
||||
|
||||
impl Parse for Pattern {
    /// Parses a single binder pattern (`$name` or `$_`).
    ///
    /// On an unexpected token the lexer is rewound to `backup`, so the
    /// caller can try an alternative parse; at EOF the lexer is already
    /// exhausted and needs no rewinding.
    fn parse(env: &mut ParseEnv<'_>) -> Result<Self, Perr> {
        let backup = env.lxr.clone();
        let Token { offset, kind } = env.lxr.next().unwrap_or_else(|| {
            Err(Perr {
                offset: env.lxr.offset(),
                kind: Pek::UnexpectedEof("pattern"),
            })
        })?;
        Ok(match kind {
            Tok::PatOut(i) => Pattern::Ident(i),
            Tok::PatIgnore => Pattern::Ignore,
            _ => {
                // backtrack before reporting, so the lexer still points at
                // the offending token
                env.lxr = backup;
                return Err(Perr {
                    // token offsets are u32, error offsets usize
                    offset: offset.try_into().unwrap(),
                    kind: Pek::UnexpectedToken {
                        kind,
                        ctx: "pattern",
                    },
                });
            }
        })
    }
}
|
||||
|
||||
impl Parse for Record {
    /// Parses a brace-delimited record body: `{ (.name =)? expr ; ... }`.
    /// The leading `.`/`^` sigil is consumed by the caller (`parse_minexpr`);
    /// this starts at the `{`.
    fn parse(env: &mut ParseEnv<'_>) -> Result<Self, Perr> {
        env.lxr.expect(Tok::LBrace, "record")?;

        let mut fields = Vec::new();

        loop {
            // snapshot for backtracking the optional `.name =` label
            let lxrbak = env.lxr.clone();
            let name = {
                let Token { kind, .. } = env.lxr.next_in_noeof("record")?;

                if let Tok::DotIdent(i) = kind {
                    if env.lxr.expect(Tok::Assign, "record").is_ok() {
                        Some(i)
                    } else {
                        // `.ident` without `=`: it belongs to the field
                        // expression instead (rewound below)
                        None
                    }
                } else if let Tok::RBrace = kind {
                    // end of the field list: rewind so the final `expect`
                    // below consumes the `}` uniformly
                    env.lxr = lxrbak;
                    break;
                } else {
                    None
                }
            };
            if name.is_none() {
                // backtrack
                env.lxr = lxrbak;
            }

            let expr = Expr::parse(env)?;
            env.lxr.expect(Tok::SemiColon, "record")?;
            fields.push((name, expr));
        }

        env.lxr.expect(Tok::RBrace, "record")?;
        Ok(Record { fields })
    }
}
|
||||
|
||||
/// Parses a minimal expression — everything except application: a name
/// reference, `λ` lambda, `μ` self-recursion, parenthesised expression,
/// `^{...}` record type or `.{...}` record value — followed by any number of
/// postfix `.field` selections.
fn parse_minexpr(env: &mut ParseEnv<'_>) -> Result<Expr, Perr> {
    let Token {
        offset: fi_offset,
        kind: fi_kind,
    } = env.lxr.next_in_noeof("expression")?;
    let mut ret = match fi_kind {
        Tok::Ident(i) => {
            // resolve the name against the scope stack (0 = innermost)
            if let Some(x) = env.lookup(&i) {
                Expr::Ref(x)
            } else {
                return Err(Perr {
                    offset: fi_offset.try_into().unwrap(),
                    kind: Pek::UnknownIdent(i),
                });
            }
        }
        Tok::Lambda => {
            // λ $pat (: pty)? → exp
            let pat = Pattern::parse(env)?;
            let pty = if env.lxr.got(Tok::DubColon).is_some() {
                Some(Box::new(Expr::parse(env)?))
            } else {
                None
            };
            env.lxr.expect(Tok::RArr, "lambda")?;
            // the body is parsed with `pat`'s binding in scope
            let exp = Box::new(pat.in_this(env, Expr::parse)?);
            return Ok(Expr::Lambda { pat, pty, exp });
        }
        Tok::Mu => {
            // μ $pat → inner
            let pat = Pattern::parse(env)?;
            env.lxr.expect(Tok::RArr, "mu")?;
            let inner = Box::new(pat.in_this(env, Expr::parse)?);
            return Ok(Expr::SelfRecur { pat, inner });
        }
        Tok::LParen => {
            let inner = Expr::parse(env)?;
            env.lxr.expect(Tok::RParen, "parens")?;
            return Ok(inner);
        }
        Tok::Caret => {
            // ^{ ... } — record type; on failure report whatever token the
            // record parser stopped at
            return if let Ok(r) = Record::parse(env) {
                Ok(Expr::TyRecord(r))
            } else {
                let Token { kind, offset } = env.lxr.next_in_noeof("expression")?;
                Err(parser::unexpected_token(offset, kind, "expression"))
            }
        }
        Tok::Dot => {
            // .{ ... } — record value
            return if let Ok(r) = Record::parse(env) {
                Ok(Expr::Record(r))
            } else {
                let Token { kind, offset } = env.lxr.next_in_noeof("expression")?;
                Err(parser::unexpected_token(offset, kind, "expression"))
            }
        }
        /*
        Tok::LBrace => {
            let inner = ;
            env.lxr.expect(Tok::RBrace, "braces")?;
            return Ok(inner);
        }
        */
        _ => return Err(parser::unexpected_token(fi_offset, fi_kind, "expression")),
    };

    // postfix: fold a run of `.a.b.c` selections into a single `Select`
    loop {
        let lxrbak = env.lxr.clone();
        let Token { kind, .. } = match env.lxr.next() {
            Some(Ok(x)) => x,
            _ => {
                // EOF or lex error: rewind and let the caller handle it
                env.lxr = lxrbak;
                break;
            }
        };
        match kind {
            Tok::DotIdent(i) => {
                if let Expr::Select { then, .. } = &mut ret {
                    then.push(i);
                } else {
                    ret = Expr::Select {
                        prim: Box::new(ret),
                        then: vec![i],
                    };
                }
            }
            _ => {
                // not a selection: rewind and stop
                env.lxr = lxrbak;
                break;
            }
        }
    }
    Ok(ret)
}
|
||||
|
||||
impl Parse for Expr {
    /// Parses a full expression: a minimal expression optionally applied to
    /// a juxtaposed sequence of further minimal expressions.
    ///
    /// Each argument is attempted on a clone of the whole environment and
    /// only committed on success, so a failed attempt leaves `env`
    /// untouched. (NOTE(review): this clones `names` per attempt — cheap
    /// for shallow scopes, quadratic for deep ones.)
    fn parse(env: &mut ParseEnv<'_>) -> Result<Self, Perr> {
        let base = parse_minexpr(env)?;
        let mut args = Vec::new();

        loop {
            let mut nxtenv = env.clone();
            args.push(match parse_minexpr(&mut nxtenv) {
                Ok(x) => x,
                Err(_) => break,
            });
            // commit the speculative parse
            *env = nxtenv;
        }

        Ok(if args.is_empty() {
            base
        } else {
            Expr::Apply {
                lam: Box::new(base),
                args,
            }
        })
    }
}
|
277
crates/yn-qgy4hbz-core/src/parser/lex.rs
Normal file
277
crates/yn-qgy4hbz-core/src/parser/lex.rs
Normal file
|
@ -0,0 +1,277 @@
|
|||
use super::{Error, ErrorKind};
|
||||
|
||||
use core::fmt;
|
||||
use yz_string_utils::StrLexerBase;
|
||||
|
||||
/// Tokenizer over a source string. Cloning is cheap and is how callers
/// snapshot/backtrack (see `got`/`expect` below and `Env` in the parser).
#[derive(Clone)]
pub struct Lexer<'a> {
    inner: StrLexerBase<'a>,
}
|
||||
|
||||
/// Byte offset of a token in the input; kept at 32 bits to keep `Token`
/// small (overflow is reported as `ErrorKind::OffsetOverflow`).
pub type Offset = u32;

/// A token paired with its position in the source.
#[derive(Clone, Debug)]
pub struct Token {
    pub kind: TokenKind<Box<str>>,
    pub offset: Offset,
}

impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "at offset {}: {:?}", self.offset, self.kind)
    }
}
|
||||
|
||||
/// A lexed token, generic over the string payload so the payload type can be
/// mapped via the derived `yn_functor::Functor`.
#[derive(Clone, Debug, PartialEq, yn_functor::Functor)]
pub enum TokenKind<S> {
    /// Bare identifier (after keyword filtering in `Lexer::next`).
    Ident(S),
    /// `$name` — pattern binder.
    PatOut(S),
    /// `.name`.
    DotIdent(S),
    /// `:name`.
    Symbol(S),
    /// Decimal integer literal.
    Integer(usize),

    LParen,
    RParen,
    LBrace,
    RBrace,
    /// `←` or `<-`.
    LArr,
    /// `→` or `->`.
    RArr,
    /// `⇐`.
    LdubArr,
    /// `⇒`.
    RdubArr,
    /// `^`.
    Caret,
    /// a bare `.`.
    Dot,
    /// a bare `:`.
    DubColon,
    SemiColon,
    /// `=`.
    Assign,
    /// a bare `$`.
    Dollar,
    /// `$_`.
    PatIgnore,

    // keywords
    Data,
    /// `λ`.
    Lambda,
    Let,
    /// `Λ`.
    TyLambda,
    /// `μ`.
    Mu,
    Match,
    /// `mut`.
    Mutable,
}
|
||||
|
||||
impl<'a> Lexer<'a> {
    /// Creates a lexer positioned at the start of `inp`.
    pub fn new(inp: &'a str) -> Self {
        Self {
            inner: StrLexerBase { inp, offset: 0 },
        }
    }

    /// Current byte offset into the original input.
    #[inline(always)]
    pub fn offset(&self) -> usize {
        self.inner.offset
    }

    /// Looks at the next token without consuming it (lexes on a clone).
    pub fn peek(&self) -> Option<Result<Token, Error>> {
        self.clone().next()
    }

    /// Like `next`, but end-of-input becomes an `UnexpectedEof` error
    /// tagged with the surrounding construct `ctx`.
    pub fn next_in_noeof(&mut self, ctx: &'static str) -> Result<Token, Error> {
        let offset = self.offset();
        self.next().unwrap_or_else(|| {
            Err(Error {
                offset,
                kind: ErrorKind::UnexpectedEof(ctx),
            })
        })
    }

    /// Consumes the next token iff it equals `xkind`, returning its offset.
    /// On mismatch, EOF or lex error, the lexer is left unchanged.
    pub fn got(&mut self, xkind: TokenKind<Box<str>>) -> Option<Offset> {
        let mut nxt = self.clone();
        match nxt.next() {
            Some(Ok(Token { offset, kind })) if xkind == kind => {
                // commit the speculative advance
                *self = nxt;
                Some(offset)
            }
            _ => None,
        }
    }

    /// Like [`Lexer::got`], but a missing/mismatched token is an error;
    /// on `UnexpectedToken` the lexer is left unchanged.
    pub fn expect(
        &mut self,
        xkind: TokenKind<Box<str>>,
        ctx: &'static str,
    ) -> Result<Offset, Error> {
        let mut nxt = self.clone();
        let Token { offset, kind } = nxt.next_in_noeof(ctx)?;
        if xkind == kind {
            *self = nxt;
            Ok(offset)
        } else {
            Err(Error {
                // u32 token offset -> usize error offset
                offset: offset.try_into().unwrap(),
                kind: ErrorKind::UnexpectedToken { kind, ctx },
            })
        }
    }
}
|
||||
|
||||
fn consume_ident(slb: &mut StrLexerBase<'_>) -> Box<str> {
|
||||
use unicode_normalization::UnicodeNormalization;
|
||||
let s = slb
|
||||
.consume_select(unicode_ident::is_xid_continue)
|
||||
.nfc()
|
||||
.to_string();
|
||||
assert!(!s.is_empty());
|
||||
s.into()
|
||||
}
|
||||
|
||||
fn try_consume_ident(slb: &mut StrLexerBase<'_>) -> Option<Box<str>> {
|
||||
if slb.inp.chars().next().map(unicode_ident::is_xid_start) == Some(true) {
|
||||
Some(consume_ident(slb))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Lexer<'a> {
|
||||
type Item = Result<Token, Error>;
|
||||
|
||||
fn next(&mut self) -> Option<Result<Token, Error>> {
|
||||
let slb = &mut self.inner;
|
||||
use TokenKind as Tk;
|
||||
|
||||
// handle whitespace
|
||||
slb.consume_select(|i| i.is_whitespace());
|
||||
let mut offset;
|
||||
|
||||
Some(
|
||||
'lxl: loop {
|
||||
if slb.inp.is_empty() {
|
||||
return None;
|
||||
}
|
||||
offset = match u32::try_from(slb.offset) {
|
||||
Ok(x) => x,
|
||||
Err(_) => {
|
||||
slb.inp = "";
|
||||
return Some(Err(Error {
|
||||
offset: slb.offset,
|
||||
kind: ErrorKind::OffsetOverflow,
|
||||
}));
|
||||
}
|
||||
};
|
||||
break match slb.inp.chars().next()? {
|
||||
'0'..='9' => {
|
||||
let s = slb.consume_select(|i| i.is_ascii_digit());
|
||||
debug_assert!(!s.is_empty());
|
||||
s.parse().map(TokenKind::Integer).map_err(|e| e.into())
|
||||
}
|
||||
|
||||
c if unicode_ident::is_xid_start(c) => {
|
||||
// identifier
|
||||
let s = consume_ident(slb);
|
||||
// handle keywords
|
||||
Ok(match &*s {
|
||||
"data" => Tk::Data,
|
||||
"let" => Tk::Let,
|
||||
"match" => Tk::Match,
|
||||
"mut" => Tk::Mutable,
|
||||
"λ" => Tk::Lambda,
|
||||
"μ" => Tk::Mu,
|
||||
"Λ" => Tk::TyLambda,
|
||||
_ => Tk::Ident(s),
|
||||
})
|
||||
}
|
||||
|
||||
c => {
|
||||
slb.consume(c.len_utf8());
|
||||
match c {
|
||||
'.' => Ok(if let Some(s) = try_consume_ident(slb) {
|
||||
Tk::DotIdent(s)
|
||||
} else {
|
||||
Tk::Dot
|
||||
}),
|
||||
';' => Ok(Tk::SemiColon),
|
||||
'^' => Ok(Tk::Caret),
|
||||
'$' => {
|
||||
Ok(if let Some(s) = try_consume_ident(slb) {
|
||||
if &*s == "_" {
|
||||
Tk::PatIgnore
|
||||
} else {
|
||||
Tk::PatOut(s)
|
||||
}
|
||||
} else {
|
||||
Tk::Dollar
|
||||
})
|
||||
},
|
||||
':' => {
|
||||
Ok(if let Some(s) = try_consume_ident(slb) {
|
||||
Tk::Symbol(s)
|
||||
} else {
|
||||
Tk::DubColon
|
||||
})
|
||||
},
|
||||
'=' => Ok(Tk::Assign),
|
||||
'←' => Ok(Tk::LArr),
|
||||
'→' => Ok(Tk::RArr),
|
||||
'⇐' => Ok(Tk::LdubArr),
|
||||
'⇒' => Ok(Tk::RdubArr),
|
||||
'<' => {
|
||||
if slb.inp.starts_with('-') {
|
||||
slb.consume(1);
|
||||
Ok(Tk::LArr)
|
||||
} else {
|
||||
Err(ErrorKind::UnhandledChar(c))
|
||||
}
|
||||
}
|
||||
'-' => {
|
||||
if slb.inp.starts_with('>') {
|
||||
slb.consume(1);
|
||||
Ok(Tk::RArr)
|
||||
} else {
|
||||
Err(ErrorKind::UnhandledChar(c))
|
||||
}
|
||||
}
|
||||
'{' /* '}' */ => Ok(Tk::LBrace),
|
||||
/* '{' */ '}' => Ok(Tk::RBrace),
|
||||
'(' /* ')' */ => {
|
||||
if slb.inp.starts_with('*') {
|
||||
// comment
|
||||
let mut lvl = 1;
|
||||
let mut it = slb.inp.chars().peekable();
|
||||
while lvl > 0 {
|
||||
let c = match it.next() {
|
||||
Some(c) => c,
|
||||
None => break 'lxl Err(ErrorKind::EofInComment),
|
||||
};
|
||||
slb.consume(c.len_utf8());
|
||||
match c {
|
||||
'(' => {
|
||||
if it.peek() == Some(&'*') {
|
||||
lvl += 1;
|
||||
}
|
||||
}
|
||||
'*' => {
|
||||
if it.peek() == Some(&')') {
|
||||
lvl -= 1;
|
||||
it.next();
|
||||
slb.consume(1);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
} else {
|
||||
Ok(Tk::LParen)
|
||||
}
|
||||
}
|
||||
/* '(' */ ')' => Ok(Tk::RParen),
|
||||
_ => Err(ErrorKind::UnhandledChar(c)),
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
.map(|kind| Token { offset, kind })
|
||||
.map_err(|kind| Error {
|
||||
offset: offset.try_into().unwrap(),
|
||||
kind,
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
98
crates/yn-qgy4hbz-core/src/parser/mod.rs
Normal file
98
crates/yn-qgy4hbz-core/src/parser/mod.rs
Normal file
|
@ -0,0 +1,98 @@
|
|||
use core::fmt;
|
||||
use miette::Diagnostic;
|
||||
|
||||
mod lex;
|
||||
pub use lex::{Lexer, Offset, Token, TokenKind};
|
||||
|
||||
/// A lexer/parser error together with its byte offset into the input.
#[derive(Clone, Debug)]
pub struct Error {
    // NOTE: the offset might exceed 32bit
    pub offset: usize,
    pub kind: ErrorKind,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "at offset {}: {}", self.offset, self.kind)
    }
}

impl std::error::Error for Error {
    /// Delegates to the wrapped [`ErrorKind`]'s source.
    #[inline]
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.kind.source()
    }
}
|
||||
|
||||
#[derive(Clone, Debug, Diagnostic, thiserror::Error)]
|
||||
pub enum ErrorKind {
|
||||
// lexer errors
|
||||
#[error("offset overflowed")]
|
||||
#[diagnostic(code(yanais::parser::offset_overflow))]
|
||||
OffsetOverflow,
|
||||
|
||||
#[error("end of file inside comment encountered")]
|
||||
#[diagnostic(code(yanais::parser::eof_in_comment))]
|
||||
EofInComment,
|
||||
|
||||
#[error("unhandled character '{0}'")]
|
||||
#[diagnostic(code(yanais::parser::unhandled_char))]
|
||||
UnhandledChar(char),
|
||||
|
||||
#[error(transparent)]
|
||||
#[diagnostic(code(yanais::parser::invalid_int))]
|
||||
InvalidInt(#[from] core::num::ParseIntError),
|
||||
|
||||
// higher parser errors
|
||||
#[error("end of file encountered inside {0}")]
|
||||
#[diagnostic(code(yanais::parser::unexpected_eof))]
|
||||
UnexpectedEof(&'static str),
|
||||
|
||||
#[error("unexpected token {kind:?} inside {ctx}")]
|
||||
#[diagnostic(code(yanais::parser::unexpected_token))]
|
||||
UnexpectedToken {
|
||||
kind: TokenKind<Box<str>>,
|
||||
ctx: &'static str,
|
||||
},
|
||||
|
||||
#[error("unknown identifier {0:?}")]
|
||||
UnknownIdent(Box<str>),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Env<'a> {
|
||||
pub lxr: lex::Lexer<'a>,
|
||||
pub names: Vec<Box<str>>,
|
||||
}
|
||||
|
||||
impl<'a> Env<'a> {
|
||||
pub fn new(lxr: lex::Lexer<'a>) -> Self {
|
||||
Self {
|
||||
lxr,
|
||||
names: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lookup(&self, name: &str) -> Option<usize> {
|
||||
self.names.iter().rev().enumerate().find_map(
|
||||
|(n, i)| {
|
||||
if &**i == name {
|
||||
Some(n)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unexpected_token(offset: u32, kind: TokenKind<Box<str>>, ctx: &'static str) -> Error {
|
||||
Error {
|
||||
offset: offset.try_into().unwrap(),
|
||||
kind: ErrorKind::UnexpectedToken { kind, ctx },
|
||||
}
|
||||
}
|
||||
|
||||
/// Anything that can be parsed out of an [`Env`].
pub trait Parse: Sized {
    /// Parses one `Self`, advancing `env.lxr` on success; rewinding
    /// behaviour on failure is implementation-specific.
    fn parse(env: &mut Env<'_>) -> Result<Self, Error>;
}
|
8
crates/yn-qgy4hbz-core/tests/ex00.yns
Normal file
8
crates/yn-qgy4hbz-core/tests/ex00.yns
Normal file
|
@ -0,0 +1,8 @@
|
|||
λ $blti → λ $maybe → μ $simpl → .{
|
||||
.stack_ds = λ $T → μ $sdst → maybe ^{ T; sdst; };
|
||||
.stack = λ $T → μ $simpl2 → .{
|
||||
.sdst = simpl.stack_ds T;
|
||||
.nil = simpl2.sdst.none blti.unit;
|
||||
.push = λ $xs → λ $x → simpl2.sdst.some (blti.box .{ x; xs; });
|
||||
};
|
||||
}
|
11
crates/yn-qgy4hbz-core/tests/exs.rs
Normal file
11
crates/yn-qgy4hbz-core/tests/exs.rs
Normal file
|
@ -0,0 +1,11 @@
|
|||
use readfilez::read_from_file;
use yn_qgy4hbz_core::parser::{Lexer, Parse, Env as ParseEnv};

/// Parses the example file at `f` end-to-end as an `Expr`, panicking on
/// any I/O, encoding or parse failure.
fn do_parse(f: &str) {
    let fh = read_from_file(std::fs::File::open(f)).expect("unable to open example file");
    // example sources are UTF-8 text
    let mut penv = ParseEnv::new(Lexer::new(core::str::from_utf8(&*fh).expect("unable to parse example file (UTF-8)")));
    yn_qgy4hbz_core::Expr::parse(&mut penv).expect("unable to parse example file (yanais)");
}

#[test]
fn ex00() { do_parse("tests/ex00.yns"); }
|
20
examples/qgy4hbz/00.yns
Normal file
20
examples/qgy4hbz/00.yns
Normal file
|
@ -0,0 +1,20 @@
|
|||
(* every object is basically a trait, just an interface *)
|
||||
(* partial decomposition is possible *)
|
||||
|
||||
(* lets build a simple stack *)
|
||||
|
||||
λ $blti → {
|
||||
|
||||
pub let maybe = λ $T → data { :null; } λ $_ →
|
||||
.{ .none = blti.unitTy; .some = blti.boxTy T; };
|
||||
|
||||
let stack_ds = λ $T → μ $sdst → maybe (^{ T; sdst });
|
||||
|
||||
pub let stack = λ $T → {
|
||||
let sdst = stack_ds T;
|
||||
pub let nil = sdst.none blti.unit;
|
||||
pub let push = λ $xs → λ $x →
|
||||
sdst.some (blti.box .{ x; xs });
|
||||
};
|
||||
|
||||
}
|
Loading…
Reference in a new issue