Compare commits
13 commits: e9675b91bc ... 6f8a49dc32

Commits (SHA1):
- 6f8a49dc32
- 01feff8cf5
- 117e8878d6
- 15f2b70449
- 5b11e2998a
- caf49d4890
- c34886f954
- 8c47749c98
- 9e53c9d199
- dab24aed61
- de6ff073dc
- 98b8d0378f
- c453a63c56
@@ -10,8 +10,12 @@ log = "0.4.0"
 anyhow = "1.0"
 env_logger = "0.10.0"
 itertools = "0.11.0"
+rayon = "1.7.0"
 
 unreal-sdk = {path = "../unreal-sdk"}
 
 quote = "1.0.28"
 proc-macro2 = "1.0.60"
+clap = { version = "4.3.9", features = ["derive"] }
+prettyplease = "0.2.9"
+syn = { version = "2.0.22", features = ["full"] }
@@ -1,9 +1,53 @@
-use std::{borrow::Cow, collections::BTreeMap};
+use std::{borrow::Cow, path::PathBuf};
 
-use unreal_sdk::sdk::repr::ObjectRef;
+use clap::{Args, Parser, Subcommand};
+use unreal_sdk::sdk::repr::Sdk;
 
-fn main() {
-    println!("Hello, world!");
+use crate::rust::Builder;
+
+#[derive(Parser)]
+#[command(author, version, about, long_about = None)]
+pub struct Cli {
+    #[command(subcommand)]
+    commands: Option<Commands>,
+}
+
+#[derive(Args)]
+pub struct Build {
+    #[arg(short, long)]
+    in_archive: PathBuf,
+    /// directory into which the sdk will be dumped.
+    #[arg(short, long)]
+    out: Option<PathBuf>,
+    #[arg(short, long, default_value = "false")]
+    single_file: bool,
+    #[arg(short, long, default_value = "true")]
+    feature_gate: bool,
+    #[arg(long, value_delimiter = ',', num_args = 1..)]
+    packages: Option<Vec<String>>,
+}
+
+#[derive(Subcommand)]
+pub enum Commands {
+    Build(Build),
+}
+
+fn main() -> anyhow::Result<()> {
+    env_logger::init();
+    log::info!("Hello, world!");
+
+    let cli = Cli::parse();
+
+    if let Some(Commands::Build(build)) = &cli.commands {
+        let sdk = Sdk::from_path_ron(&build.in_archive)?;
+        let builder = Builder::new(sdk);
+
+        let path = build.out.clone().unwrap_or(std::env::current_dir()?);
+
+        builder.build_in_dir(path, build)?;
+    }
+
+    Ok(())
 }
 
 struct SplitResult<'a> {
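A quick illustration of how the new clap derive attributes behave, in particular `value_delimiter` splitting the comma-separated `--packages` list. This is a standalone sketch: the struct shapes mirror the ones added above, but the binary name and argument values are invented.

```rust
use clap::{Args, Parser, Subcommand};
use std::path::PathBuf;

#[derive(Parser)]
struct Cli {
    #[command(subcommand)]
    commands: Option<Commands>,
}

#[derive(Subcommand)]
enum Commands {
    Build(Build),
}

#[derive(Args)]
struct Build {
    /// path of the sdk archive to load.
    #[arg(short, long)]
    in_archive: PathBuf,
    /// optional package filter, split on commas.
    #[arg(long, value_delimiter = ',', num_args = 1..)]
    packages: Option<Vec<String>>,
}

fn main() {
    // e.g. `sdk-builder build --in-archive sdk.ron --packages Engine,CoreUObject`
    let cli = Cli::parse_from([
        "sdk-builder", "build",
        "--in-archive", "sdk.ron",
        "--packages", "Engine,CoreUObject",
    ]);

    if let Some(Commands::Build(build)) = cli.commands {
        assert_eq!(build.in_archive, PathBuf::from("sdk.ron"));
        assert_eq!(
            build.packages,
            Some(vec!["Engine".to_string(), "CoreUObject".to_string()])
        );
    }
}
```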
@@ -69,20 +113,6 @@ fn split_at_illegal_char<'a>(input: &'a str, disallowed_tokens: &[char]) -> Spli
     }
 }
 
-fn canonicalize_name<'a>(
-    name: &'a str,
-    disallowed_tokens: &[char],
-    disallowed_strs: &[&str],
-) -> Cow<'a, str> {
-    let valid = split_at_illegal_char(name, disallowed_tokens).into_valid(disallowed_tokens);
-    if disallowed_strs.contains(&valid.as_ref()) || valid.starts_with(|c: char| !c.is_alphabetic())
-    {
-        Cow::Owned(format!("_{}", &valid))
-    } else {
-        valid
-    }
-}
-
 fn empty_or_some(s: &str) -> Option<&str> {
     if s.is_empty() {
         None
@@ -91,23 +121,140 @@ fn empty_or_some(s: &str) -> Option<&str> {
     }
 }
 
-pub struct CanonicalNames {
-    /// canonicalized type names for lookup when handling return types and parameters.
-    types: BTreeMap<ObjectRef, String>,
+pub mod path_helper {
+    use quote::{format_ident, quote, ToTokens, TokenStreamExt};
+    use unreal_sdk::sdk::repr::Type;
+
+    use crate::rust::Builder;
+
+    pub struct Type2<'a> {
+        cache: &'a Builder,
+        inner: Type,
+    }
+
+    impl<'a> Type2<'a> {
+        pub fn new(cache: &'a Builder, inner: Type) -> Self {
+            Self { cache, inner }
+        }
+
+        fn child(&self, inner: Type) -> Self {
+            Self {
+                inner,
+                cache: self.cache,
+            }
+        }
+    }
+
+    impl<'a> ToTokens for Type2<'a> {
+        fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+            match &self.inner {
+                Type::Ptr(inner) | Type::Ref(inner) => {
+                    let inner = self.child(*inner.clone());
+
+                    tokens.extend(quote! {
+                        ::core::option::Option<NonNull<#inner>>
+                    });
+                }
+                Type::WeakPtr(inner) => {
+                    let inner = self
+                        .cache
+                        .get_full_type_tokens(&inner)
+                        .unwrap_or(quote!(crate::engine::UObject));
+                    tokens.extend(quote!(
+                        crate::engine::TWeakObjectPtr<#inner>
+                    ));
+                }
+                Type::SoftPtr(inner) => {
+                    let inner = self
+                        .cache
+                        .get_full_type_tokens(&inner)
+                        .unwrap_or(quote!(crate::engine::UObject));
+                    tokens.extend(quote!(
+                        crate::engine::TSoftObjectPtr<#inner>
+                    ));
+                }
+                Type::LazyPtr(inner) => {
+                    let inner = self
+                        .cache
+                        .get_full_type_tokens(&inner)
+                        .unwrap_or(quote!(crate::engine::UObject));
+
+                    tokens.extend(quote!(
+                        crate::engine::TLazyObjectPtr<#inner>
+                    ));
+                }
+                Type::AssetPtr(inner) => {
+                    let inner = self
+                        .cache
+                        .get_full_type_tokens(&inner)
+                        .unwrap_or(quote!(crate::engine::UObject));
+
+                    tokens.extend(quote!(
+                        crate::engine::TAssetPtr<#inner>
+                    ));
+                }
+                Type::Array(inner) => {
+                    let inner = self.child(*inner.clone());
+                    tokens.extend(quote!(
+                        crate::engine::TArray<#inner>
+                    ));
+                }
+                Type::Primitive(prim) => {
+                    tokens.append(format_ident!("{prim}"));
+                }
+                Type::RawArray { ty, len } => {
+                    let ty = self.child(*ty.clone());
+                    tokens.extend(quote! { [ #ty; #len ] });
+                }
+                Type::Name => tokens.extend(quote!(crate::engine::FName)),
+                Type::String => tokens.extend(quote!(crate::engine::FString)),
+                Type::Text => tokens.extend(quote!(crate::engine::FText)),
+                Type::Enum { enum_type, .. } => {
+                    let inner = self
+                        .cache
+                        .get_full_type_tokens(&enum_type)
+                        .unwrap_or(quote!(u8));
+
+                    tokens.extend(inner);
+                }
+                Type::Class(class) => {
+                    let inner = self
+                        .cache
+                        .get_full_type_tokens(&class)
+                        .unwrap_or(quote!(crate::engine::UObject));
+
+                    tokens.extend(quote!(::core::option::Option<#inner>));
+                }
+                Type::Struct(class) => {
+                    let inner = self
+                        .cache
+                        .get_full_type_tokens(&class)
+                        .unwrap_or(quote!(()));
+
+                    tokens.extend(inner);
+                }
+            };
+        }
+    }
 }
 
 pub mod rust {
-    use std::{borrow::Cow, collections::BTreeMap};
-
-    use anyhow::Context;
-    use proc_macro2::TokenStream;
-    use quote::{format_ident, quote};
-    use unreal_sdk::sdk::repr::{
-        Class, ClassField, ClassMethod, Enum, ObjectRef, PrimitiveType, ProcessedPackage, Sdk,
-        StructKind, Type, UnrealType,
-    };
-
-    use crate::split_at_illegal_char;
+    use std::{
+        borrow::Cow,
+        collections::{BTreeMap, BTreeSet},
+        path::Path,
+    };
+
+    use anyhow::Context;
+    use itertools::Itertools;
+    use proc_macro2::{Ident, TokenStream};
+    use quote::{format_ident, quote};
+    use unreal_sdk::sdk::repr::{
+        Class, ClassField, ClassMethod, Enum, ObjectRef, PackageRef, PrimitiveType,
+        ProcessedPackage, Sdk, StructKind, Type, UnrealType,
+    };
+
+    use crate::{path_helper::Type2, split_at_illegal_char};
 
     // const KEYWORDS: [&'static str; 51] = [
     //     "as", "break", "const", "continue", "crate", "else", "enum", "extern", "false", "fn",
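For readers unfamiliar with `quote`, this is the mechanism the new `Type2` wrapper relies on: interpolating a value with `#...` inside `quote!` calls that value's `ToTokens` impl, which is what lets `Type2` recurse into pointer and array element types. A minimal standalone sketch (the `ArrayOf` type is invented purely for illustration):

```rust
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};

struct ArrayOf(TokenStream);

impl ToTokens for ArrayOf {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let elem = &self.0;
        // `#elem` splices the element tokens into the surrounding stream.
        tokens.extend(quote!(crate::engine::TArray<#elem>));
    }
}

fn main() {
    let ty = ArrayOf(quote!(u32));
    // prints roughly: crate :: engine :: TArray < u32 >
    println!("{}", ty.to_token_stream());
}
```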
@@ -132,9 +279,9 @@ pub mod rust {
         "i64", "i128", "usize", "isize",
     ];
 
-    const CHARS: [char; 19] = [
+    const CHARS: [char; 21] = [
         ' ', '?', '+', '-', ':', '/', '^', '(', ')', '[', ']', '<', '>', '&', '.', '#', '\'', '"',
-        '%',
+        '%', ',', '|',
     ];
 
     pub struct Builder {
@@ -142,6 +289,10 @@
         sdk: Sdk,
     }
 
+    fn canonicalize_ident(name: &str) -> Ident {
+        format_ident!("{}", canonicalize_name(name))
+    }
+
     fn canonicalize_name<'a>(name: &'a str) -> Cow<'a, str> {
         let valid = split_at_illegal_char(name, &CHARS).into_valid(&CHARS);
         if WORDS.contains(&valid.as_ref()) || valid.starts_with(|c: char| !c.is_alphabetic()) {
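The intent of `canonicalize_ident` / `canonicalize_name` is to turn arbitrary Unreal names into valid Rust identifiers before handing them to `format_ident!`. A rough standalone sketch of that behaviour (the character handling and keyword list here are abbreviated stand-ins, not the real `CHARS`/`WORDS` tables or `split_at_illegal_char`):

```rust
use quote::format_ident;

fn sanitize(name: &str) -> String {
    const WORDS: [&str; 3] = ["fn", "type", "struct"]; // abbreviated stand-in list
    // replace anything that cannot appear in an identifier
    let cleaned: String = name
        .chars()
        .map(|c| if c.is_alphanumeric() || c == '_' { c } else { '_' })
        .collect();
    // keywords and names that do not start with a letter get a '_' prefix
    if WORDS.contains(&cleaned.as_str()) || cleaned.starts_with(|c: char| !c.is_alphabetic()) {
        format!("_{cleaned}")
    } else {
        cleaned
    }
}

fn main() {
    // "2DArray" cannot start an identifier, "type" is a keyword.
    assert_eq!(sanitize("2DArray"), "_2DArray");
    let ident = format_ident!("{}", sanitize("type"));
    println!("{ident}"); // _type
}
```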
@@ -170,11 +321,24 @@
         }
 
         /// returns the absolute path of a type with the assumption that all
-        /// packages are children of the path `::crate::sdk`
-        fn get_type_path(&self, key: &ObjectRef) -> Option<String> {
-            let pkg = &self.sdk.packages.get(&key.package)?.name;
-            self.get_type_name(key)
-                .map(|name| format!("::crate::sdk::{pkg}::{name}"))
+        /// packages are children of the path `crate::sdk`
+        pub fn get_type_package_path(&self, key: &ObjectRef) -> Option<TokenStream> {
+            let pkg = format_ident!("{}", &self.sdk.packages.get(&key.package)?.name);
+            Some(quote!(crate::sdk::#pkg))
+        }
+
+        pub fn get_full_type_tokens(&self, key: &ObjectRef) -> Option<TokenStream> {
+            let pkg = self.get_type_package_path(key)?;
+            let name = format_ident!("{}", self.get_type_name(key)?);
+            Some(quote! {
+                #pkg::#name
+            })
+        }
+
+        /// returns the precached, prefixed and cannonicalized (for this
+        /// language, Rust) `Ident` for this object-ref
+        fn get_type_ident(&self, key: &ObjectRef) -> Option<Ident> {
+            Some(format_ident!("{}", self.type_name_cache.get(key)?))
         }
 
         /// returns the precached, prefixed and cannonicalized (for this
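The split between `get_type_package_path` and `get_full_type_tokens` works because a `TokenStream` can be interpolated into another `quote!` invocation just like an `Ident`. A small sketch with invented package and type names:

```rust
use quote::{format_ident, quote};

fn main() {
    let pkg = format_ident!("Engine");
    let package_path = quote!(crate::sdk::#pkg);

    let name = format_ident!("AActor");
    // nest the package path tokens inside a larger path
    let full = quote!(#package_path::#name);

    // prints the token stream, roughly `crate :: sdk :: Engine :: AActor`
    println!("{full}");
}
```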
@@ -201,86 +365,121 @@
             }
         }
 
-        pub fn build(self) -> anyhow::Result<()> {
-            for pkg in self.sdk.packages.values() {
-                self.generate_package(pkg)?;
-            }
+        pub fn build(self, args: &super::Build) -> anyhow::Result<BTreeMap<String, TokenStream>> {
+            let pkgs = if let Some(packages) = &args.packages {
+                let deps = self.dependencies_for_package_names(packages);
+                log::debug!("all dependencies: {deps:?}");
+
+                deps.iter()
+                    .map(|id| self.sdk.packages.get(id).unwrap())
+                    .collect::<Vec<_>>()
+            } else {
+                self.sdk.packages.values().collect::<Vec<_>>()
+            };
+
+            let packages = pkgs
+                .into_iter()
+                .map(|pkg| {
+                    let name = canonicalize_name(&pkg.name).to_string();
+                    let tokens = self.generate_package(pkg, args.feature_gate)?;
+
+                    anyhow::Ok((name, tokens))
+                })
+                .collect::<Result<BTreeMap<_, _>, _>>()?;
+
+            Ok(packages)
+        }
+
+        fn get_package_by_name(&self, name: &str) -> Option<PackageRef> {
+            self.sdk
+                .packages
+                .iter()
+                .find(|(_, pkg)| &pkg.name == name)
+                .map(|(id, _)| *id)
+        }
+
+        fn dependencies_for_package_names(&self, names: &Vec<String>) -> BTreeSet<PackageRef> {
+            names
+                .iter()
+                .filter_map(|name| self.get_package_by_name(name))
+                .flat_map(|id| self.dependencies(self.sdk.packages.get(&id).unwrap()))
+                .collect::<BTreeSet<_>>()
+        }
+
+        fn dependencies(&self, pkg: &ProcessedPackage) -> BTreeSet<PackageRef> {
+            let mut set = BTreeSet::new();
+
+            self.dependencies_inner(pkg, &mut set);
+
+            set
+        }
+
+        fn dependencies_inner(&self, pkg: &ProcessedPackage, pkgs: &mut BTreeSet<PackageRef>) {
+            pkgs.insert(pkg.package_object);
+
+            // depth first, does that matter?
+            for id in pkg.dependencies.iter() {
+                if !pkgs.contains(id) {
+                    if let Some(pkg) = self.sdk.packages.get(id) {
+                        self.dependencies_inner(pkg, pkgs);
+                    }
+                }
+            }
+        }
+
+        pub fn build_in_dir<P: AsRef<Path>>(
+            self,
+            path: P,
+            args: &super::Build,
+        ) -> anyhow::Result<()> {
+            let packages = self.build(args)?;
+
+            let path = path.as_ref();
+            std::fs::create_dir_all(&path)?;
+
+            let mut mod_rs = TokenStream::new();
+
+            for (name, tokens) in packages {
+                let name = format_ident!("{name}");
+                if args.single_file {
+                    mod_rs.extend(quote! {
+                        pub mod #name {
+                            #tokens
+                        }
+                    });
+                } else {
+                    let path = path.join(format!("{name}.rs"));
+
+                    log::info!("parsing {name}..");
+                    let file = syn::parse_file(&tokens.to_string())
+                        .context("syn failed to parse generated code")?;
+
+                    log::info!("pretty printing {name}..");
+                    let contents = prettyplease::unparse(&file);
+                    log::info!("writing to {}..", path.display());
+                    std::fs::write(path, contents)?;
+
+                    mod_rs.extend(quote! {
+                        pub mod #name;
+                    });
+                }
+            }
+
+            let contents = prettyplease::unparse(
+                &syn::parse_file(&mod_rs.to_string()).context("syn failed to parse root module")?,
+            );
+            std::fs::write(path.join("mod.rs"), contents)?;
 
             Ok(())
         }
 
-        fn type_name(&self, ty: &Type) -> anyhow::Result<String> {
-            let type_name = match ty {
-                Type::Ptr(inner) | Type::Ref(inner) => {
-                    format!(
-                        "::core::option::Option<NonNull<{}>>",
-                        self.type_name(&inner)?
-                    )
-                }
-                Type::WeakPtr(inner) => {
-                    format!(
-                        "::crate::engine::TWeakObjectPtr<{}>",
-                        self.get_type_path(inner)
-                            .context("type name was not cached.")?
-                    )
-                }
-                Type::SoftPtr(inner) => {
-                    format!(
-                        "::crate::engine::TSoftObjectPtr<{}>",
-                        self.get_type_path(inner)
-                            .context("type name was not cached.")?
-                    )
-                }
-                Type::LazyPtr(inner) => {
-                    format!(
-                        "::crate::engine::TLazyObjectPtr<{}>",
-                        self.get_type_path(inner)
-                            .context("type name was not cached.")?
-                    )
-                }
-                Type::AssetPtr(inner) => format!(
-                    "::crate::engine::TAssetPtr<{}>",
-                    self.get_type_path(inner)
-                        .context("type name was not cached.")?
-                ),
-                Type::Array(inner) => {
-                    format!("::crate::engine::TArray<{}>", self.type_name(&inner)?)
-                }
-                Type::Primitive(prim) => {
-                    format!("{prim}")
-                }
-                Type::RawArray { ty, len } => {
-                    format!("[{}; {}]", self.type_name(&ty)?, len)
-                }
-                Type::Name => "::crate::engine::FName".to_string(),
-                Type::String => "::crate::engine::FString".to_string(),
-                Type::Text => "::crate::engine::FText".to_string(),
-                Type::Enum { enum_type, .. } => self
-                    .get_type_path(enum_type)
-                    .context("type name was not cached.")?,
-                Type::Class(class) => {
-                    format!(
-                        "::core::option::Option<{}>",
-                        self.get_type_path(class)
-                            .context("type name was not cached.")?
-                    )
-                }
-                Type::Struct(class) => self
-                    .get_type_path(class)
-                    .context("type name was not cached.")?
-                    .clone(),
-            };
-
-            Ok(type_name)
-        }
-
         fn generate_enum(&self, enum0: &Enum) -> anyhow::Result<TokenStream> {
             let name = self
-                .get_type_name(&enum0.obj_ref)
+                .get_type_ident(&enum0.obj_ref)
                 .context("enum name was not previously canonicalized and cached.")?;
 
             let variants = enum0.values.iter().map(|(&value, name)| {
-                let name = canonicalize_name(&name);
+                let name = canonicalize_ident(&name);
                 quote! {
                     #name = #value,
                 }
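`build_in_dir` leans on the `syn`/`prettyplease` pair added to Cargo.toml: the generated `TokenStream` is stringified, parsed back into a `syn::File`, and pretty-printed before being written out. A minimal round-trip sketch of that step (assuming syn 2 with the `full` feature and prettyplease 0.2, as in the dependency hunk above):

```rust
use quote::quote;

fn main() -> anyhow::Result<()> {
    // stand-in for a package's generated tokens
    let tokens = quote! {
        pub mod demo { pub fn answer() -> u32 { 42 } }
    };

    // prettyplease formats a syn::File, so the raw token text is parsed first.
    let file = syn::parse_file(&tokens.to_string())?;
    let pretty = prettyplease::unparse(&file);
    println!("{pretty}");
    Ok(())
}
```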
@@ -303,7 +502,7 @@
         fn generate_object(
             &self,
             _class: &Class,
-            name: &str,
+            name: &Ident,
         ) -> anyhow::Result<(TokenStream, TokenStream)> {
             let typedef = quote! {
                 #[derive(Eq, PartialEq, Copy, Clone)]
@@ -313,17 +512,17 @@
             let static_class_impl: TokenStream = Self::generate_find_object(name);
 
             let impls = quote! {
-                impl AsUObject for #name {
-                    fn as_uobject(&self) -> UObject {
-                        UObject(self.0)
+                impl crate::engine::AsUObject for #name {
+                    fn as_uobject(&self) -> crate::engine::UObject {
+                        crate::engine::UObject(self.0)
                     }
 
-                    fn from_uobject(obj: &UObject) -> Self {
+                    fn from_uobject(obj: &crate::engine::UObject) -> Self {
                         Self(obj.0)
                     }
                 }
 
-                impl AsPtr for #name {
+                impl crate::engine::AsPtr for #name {
                     fn as_ptr(&self) -> *const u8 {
                         unsafe { self.0.as_ref().get() as _ }
                     }
@@ -333,9 +532,10 @@
                     }
                 }
 
-                impl StaticClass for #name {
-                    fn get_static_class() -> Option<UClass> {
-                        let class: Option<UClass> = #static_class_impl;
+                impl crate::engine::StaticClass for #name {
+                    fn get_static_class() -> ::core::option::Option<crate::engine::UClass> {
+                        let class: ::core::option::Option<crate::engine::UClass> =
+                            #static_class_impl;
                         class
                     }
                 }
@@ -350,12 +550,13 @@
         fn generate_struct(
             &self,
             class: &Class,
-            name: &str,
+            name: &Ident,
             ctor: Option<TokenStream>,
         ) -> anyhow::Result<(TokenStream, TokenStream)> {
             let size = class.size;
 
             let typedef = quote! {
-                pub struct #name(pub ::core::cell::UnsafeCell<u8; #size>);
+                pub struct #name(pub ::core::cell::UnsafeCell<[u8; #size]>);
             };
 
             let impls = quote! {
@@ -372,7 +573,7 @@
                 }
             }
 
-            impl AsPtr for #name {
+            impl crate::engine::AsPtr for #name {
                 fn as_ptr(&self) -> *const u8 {
                     self.0.get().cast()
                 }
@@ -403,7 +604,7 @@
         fn generate_struct_methods(
             &self,
             class: &Class,
-            name: &str,
+            name: &Ident,
         ) -> anyhow::Result<(Vec<TokenStream>, Vec<TokenStream>)> {
             let methods = class
                 .methods
@@ -421,27 +622,30 @@
         /// - the method wrapper.
         fn generate_method(
             &self,
-            struct_name: &str,
+            struct_name: &Ident,
             method: &ClassMethod,
         ) -> anyhow::Result<(TokenStream, TokenStream)> {
-            let method_name = canonicalize_name(&method.unique_name());
+            let method_name = canonicalize_ident(&method.unique_name());
 
+            // all parameters collected as (parameter, canonicalized_name, type_ident)
             let parameters = method
                 .parameters
                 .iter()
                 .map(|parameter| {
-                    let name = canonicalize_name(&parameter.unique_name());
-                    let type_name = self.type_name(&parameter.ty)?;
+                    let name = canonicalize_ident(&parameter.unique_name());
+                    let type_name = Type2::new(self, parameter.ty.clone());
 
                     anyhow::Ok((parameter, name, type_name))
                 })
                 .collect::<Result<Vec<_>, _>>()?;
 
+            // all parameters converted into "arg: Type" format of tokens.
             let all_params = parameters
                 .iter()
                 .map(|(param, name, ty)| (param, quote! {#name: #ty}))
                 .collect::<Vec<_>>();
 
+            // params that the function will accept as arguments.
             let params = all_params
                 .iter()
                 .filter(|(param, _)| {
@@ -449,13 +653,16 @@
                 })
                 .map(|(_, tokens)| tokens.clone());
 
+            // tokens of all params, for the Params struct definition.
             let all_params = all_params.iter().map(|(_, tokens)| tokens.clone());
 
+            // param token streams for setting the fields of the params struct
+            // with the arguments of the function.
             let init_params = parameters.iter().map(|(_, name, _)| {
                 quote! {params.#name = #name;}
             });
 
-            let (return_type, handle_return) = {
+            let (return_type, return_expression) = {
                 let (names, types) = parameters
                     .iter()
                     .filter(|(param, _, _)| {
|
||||||
process_event(self.as_uobject(), func, &mut params);
|
process_event(self.as_uobject(), func, &mut params);
|
||||||
func.set_function_flags(flags);
|
func.set_function_flags(flags);
|
||||||
|
|
||||||
#handle_return
|
#return_expression
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -517,8 +724,8 @@ pub mod rust {
|
||||||
fn generate_field_accessors(
|
fn generate_field_accessors(
|
||||||
&self,
|
&self,
|
||||||
field: &ClassField,
|
field: &ClassField,
|
||||||
field_name: &Cow<str>,
|
field_name: &Ident,
|
||||||
type_name: &String,
|
type_name: &Type2,
|
||||||
) -> TokenStream {
|
) -> TokenStream {
|
||||||
let setter = format_ident!("set_{}", field_name);
|
let setter = format_ident!("set_{}", field_name);
|
||||||
let getter = format_ident!("get_{}", field_name);
|
let getter = format_ident!("get_{}", field_name);
|
||||||
|
@ -590,19 +797,21 @@ pub mod rust {
|
||||||
fn generate_struct_ctor(
|
fn generate_struct_ctor(
|
||||||
&self,
|
&self,
|
||||||
_class: &Class,
|
_class: &Class,
|
||||||
type_name: &str,
|
type_ident: &Ident,
|
||||||
fields: &Vec<(&ClassField, Cow<str>, String)>,
|
fields: &Vec<(&ClassField, Ident, Type2)>,
|
||||||
) -> TokenStream {
|
) -> TokenStream {
|
||||||
let fields_defs = fields.iter().map(|(_, name, ty)| quote! {#name: #ty});
|
let fields_defs = fields.iter().map(|(_, name, ty)| quote! {#name: #ty});
|
||||||
|
|
||||||
let this_field_asignments = fields.iter().map(|(_, name, _ty)| {
|
let this_field_asignments = fields.iter().map(|(_, name, _ty)| {
|
||||||
let setter = format_ident!("set_{}", name);
|
let setter = format_ident!("set_{}", name);
|
||||||
let field_trait = format_ident!("{type_name}Fields");
|
let field_trait = format_ident!("{type_ident}Fields");
|
||||||
|
|
||||||
quote! {<Self as #field_trait>::#setter(this, #name);}
|
quote! {<Self as #field_trait>::#setter(this, #name);}
|
||||||
});
|
});
|
||||||
|
|
||||||
// FIXME: handle super struct fields aswell, ARK doesnt seem to have those anyways.
|
// FIXME: handle super struct fields aswell, ARK doesnt seem to have those anyways.
|
||||||
|
// export getting fields into a seperate function, this function will be fallible then.
|
||||||
|
// it is a lot of work for nothing currently.
|
||||||
|
|
||||||
quote! {
|
quote! {
|
||||||
pub fn new(#(#fields_defs),*) -> Self {
|
pub fn new(#(#fields_defs),*) -> Self {
|
||||||
|
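The `#(#fields_defs),*` form used in the generated `new` constructor is `quote`'s repetition syntax: it splices every element of an iterator into the output, separated by commas. A standalone sketch with invented field names:

```rust
use quote::{format_ident, quote};

fn main() {
    // iterator of per-field token streams, e.g. `health: u32`
    let fields_defs = ["health", "level"].iter().map(|name| {
        let ident = format_ident!("{name}");
        quote!(#ident: u32)
    });

    let tokens = quote! {
        pub fn new(#(#fields_defs),*) -> Self { todo!() }
    };

    // prints roughly `pub fn new (health : u32 , level : u32) -> Self { todo ! () }`
    println!("{tokens}");
}
```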
@@ -620,14 +829,14 @@
         fn generate_struct_fields(
             &self,
             class: &Class,
-            name: &str,
+            name: &Ident,
         ) -> anyhow::Result<(TokenStream, Option<TokenStream>)> {
             let fields = class
                 .fields
                 .iter()
                 .map(|field| {
-                    let name = canonicalize_name(&field.unique_name());
-                    let ty = self.type_name(&field.ty)?;
+                    let name = canonicalize_ident(&field.unique_name());
+                    let ty = Type2::new(self, field.ty.clone());
 
                     anyhow::Ok((field, name, ty))
                 })
@@ -656,9 +865,11 @@
             Ok((fields_trait, ctor))
         }
 
-        fn generate_find_object(name: &str) -> TokenStream {
+        fn generate_find_object<S: ToString>(name: S) -> TokenStream {
+            let name = name.to_string();
             let not_found = format!("static object \"{name}\" not found!");
             quote! {
+                {
                 static OBJECT: ::once_cell::sync::OnceCell<::core::option::Option<UObject>> = ::once_cell::sync::OnceCell::new();
                 OBJECT.get_or_init(|| {
                     match find_object(::obfstr::obfstr!(#name)) {
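The code that `generate_find_object` emits wraps the lookup in a `once_cell::sync::OnceCell`, so each generated call site resolves its object at most once. A plain-Rust sketch of that caching pattern (assuming the `once_cell` crate, which the generated code references via `::once_cell`; the lookup function and object name are invented):

```rust
use once_cell::sync::OnceCell;

fn expensive_lookup(name: &str) -> Option<u32> {
    println!("looking up {name}"); // runs only once
    Some(42)
}

fn cached_lookup() -> Option<u32> {
    static OBJECT: OnceCell<Option<u32>> = OnceCell::new();
    *OBJECT.get_or_init(|| expensive_lookup("Class Engine.Actor"))
}

fn main() {
    assert_eq!(cached_lookup(), Some(42));
    assert_eq!(cached_lookup(), Some(42)); // second call hits the cache
}
```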
@@ -669,12 +880,30 @@
                     }
                 })
                 .map(|object| unsafe {object.cast()})
+                }
             }
         }
 
+        fn iter_super_types(&self, class: &Class) -> impl Iterator<Item = &UnrealType> {
+            let super_traits = core::iter::from_fn({
+                let mut sup = class.super_class;
+                move || {
+                    if let Some(key) = sup {
+                        let next = self.sdk.get_object(&key);
+                        sup = next.and_then(|next| next.super_class());
+                        next
+                    } else {
+                        None
+                    }
+                }
+            });
+
+            super_traits
+        }
+
         fn generate_class(&self, class: &Class) -> anyhow::Result<TokenStream> {
             let name = &self
-                .get_type_name(&class.obj_ref)
+                .get_type_ident(&class.obj_ref)
                 .context("enum name was not previously canonicalized and cached.")?;
 
             let (field_trait, ctor) = self.generate_struct_fields(class, name)?;
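`iter_super_types` walks the super-class chain lazily with `core::iter::from_fn`. The same pattern, reduced to a toy parent map (the numeric "registry" below is purely illustrative):

```rust
use std::collections::BTreeMap;

fn main() {
    // child -> parent relationships
    let parents: BTreeMap<u32, u32> = BTreeMap::from([(3, 2), (2, 1)]);

    let mut current = Some(3u32);
    let chain = core::iter::from_fn(move || {
        let key = current?;
        // advance to the parent before yielding the current entry
        current = parents.get(&key).copied();
        Some(key)
    });

    assert_eq!(chain.collect::<Vec<_>>(), vec![3, 2, 1]);
}
```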
@@ -698,29 +927,26 @@
                 impl #method_trait for #name {}
             };
 
-            let mut sup = class.super_class;
-            let super_traits = core::iter::from_fn(|| {
-                if let Some(key) = sup {
-                    let next = self.sdk.get_object(&key);
-                    sup = next.and_then(|next| next.super_class());
-                    next
-                } else {
-                    None
-                }
-            })
-            // SAFETY: we already got this type by its obj_ref, so it must be there.
-            .map(|ty| self.get_type_path(&ty.obj_ref()).unwrap())
-            .map(|super_name| {
-                let fields = format_ident!("{super_name}Fields");
-                let methods = format_ident!("{super_name}Methods");
-
-                quote! {
-                    impl #fields for #name {}
-                    impl #methods for #name {}
-                }
-            });
+            let super_traits = self
+                .iter_super_types(class)
+                // SAFETY: we already got this type by its obj_ref, so it must be there.
+                .map(|ty| {
+                    (
+                        self.get_type_package_path(&ty.obj_ref()).unwrap(),
+                        self.get_type_ident(&ty.obj_ref()).unwrap(),
+                    )
+                })
+                .map(|(super_path, super_name)| {
+                    let fields = format_ident!("{super_name}Fields");
+                    let methods = format_ident!("{super_name}Methods");
+
+                    quote! {
+                        impl #super_path::#fields for #name {}
+                        impl #super_path::#methods for #name {}
+                    }
+                });
 
-            quote! {
+            let tokens = quote! {
                 #[repr(transparent)]
                 #[derive(Debug)]
                 #typedef
|
||||||
|
|
||||||
#field_trait
|
#field_trait
|
||||||
};
|
};
|
||||||
todo!()
|
|
||||||
|
Ok(tokens)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn generate_package(&self, pkg: &ProcessedPackage) -> anyhow::Result<()> {
|
fn generate_package(
|
||||||
let pkg_name = canonicalize_name(&pkg.name);
|
&self,
|
||||||
|
pkg: &ProcessedPackage,
|
||||||
|
feature_gate: bool,
|
||||||
|
) -> anyhow::Result<TokenStream> {
|
||||||
|
let pkg_name = canonicalize_ident(&pkg.name);
|
||||||
|
log::info!(
|
||||||
|
"generating package \"{pkg_name}\" with {} types..",
|
||||||
|
pkg.types.len()
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut pkg_tokens = TokenStream::new();
|
||||||
|
|
||||||
for (_id, ty) in &pkg.types {
|
for (_id, ty) in &pkg.types {
|
||||||
let tokens = match ty {
|
let tokens = match ty {
|
||||||
|
@@ -750,18 +987,35 @@
                     UnrealType::Enum(enum0) => self.generate_enum(enum0)?,
                 };
 
-                println!("{tokens}");
+                pkg_tokens.extend(tokens);
             }
 
-            quote! {
-                #[cfg(feature = "#pkg_name")]
-                pub mod #pkg_name {
-                    #![allow(dead_code, unused_imports, non_snake_case, non_camel_case_types)]
-
-                }
-            };
-
-            todo!()
+            let deps = pkg
+                .dependencies
+                .iter()
+                .filter_map(|id| self.sdk.packages.get(id))
+                .map(|package| format!("`{}`", package.name))
+                .join(",");
+
+            let doc_msg = format!("Package `{pkg_name}` depends on the features {deps}.");
+
+            let feature_gate = if feature_gate {
+                Some(quote! {
+                    #![doc = #doc_msg]
+                    #![cfg(feature = "#pkg_name")]
+                })
+            } else {
+                None
+            };
+
+            Ok(quote! {
+                pub mod #pkg_name {
+                    #feature_gate
+                    #![allow(dead_code, unused_imports, non_snake_case, non_camel_case_types)]
+
+                    #pkg_tokens
+                }
+            })
         }
     }
 }
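The optional `#feature_gate` interpolation above works because `Option<TokenStream>` implements `ToTokens`: `Some(tokens)` expands to the tokens and `None` expands to nothing (the same trick the `#ctor` interpolation relies on). A small standalone sketch:

```rust
use proc_macro2::TokenStream;
use quote::quote;

fn gated(gate: Option<TokenStream>) -> TokenStream {
    quote! {
        pub mod demo {
            // expands to nothing when `gate` is None
            #gate
            pub fn f() {}
        }
    }
}

fn main() {
    let with = gated(Some(quote!(#![cfg(feature = "demo")])));
    let without = gated(None);
    println!("{with}");
    println!("{without}");
}
```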
@@ -66,6 +66,13 @@ impl Sdk {
             .and_then(|text| std::fs::write(path, text).context("failed to write to file."))
     }
 
+    pub fn from_path_ron<P: AsRef<Path>>(path: P) -> anyhow::Result<Self> {
+        let contents = std::fs::read(path)?;
+        let new = ron::de::from_bytes::<Self>(&contents)?;
+
+        Ok(new)
+    }
+
     pub fn get_object(&self, key: &ObjectRef) -> Option<&UnrealType> {
         self.packages
             .get(&key.package)
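`Sdk::from_path_ron` deserializes the archive with `ron::de::from_bytes`. A self-contained sketch of that call (assuming the `ron` and `serde` crates with derive support; `Config` is an invented stand-in for the real `Sdk` type):

```rust
use serde::Deserialize;

#[derive(Deserialize, Debug, PartialEq)]
struct Config {
    name: String,
    size: u32,
}

fn main() -> anyhow::Result<()> {
    // RON uses parenthesized struct syntax
    let bytes = br#"(name: "CoreUObject", size: 24)"#;
    let cfg = ron::de::from_bytes::<Config>(bytes)?;
    assert_eq!(cfg, Config { name: "CoreUObject".into(), size: 24 });
    Ok(())
}
```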
@@ -478,6 +485,7 @@ fn resolve_type(prop: UProperty) -> anyhow::Result<Type> {
                 .object_ref(),
             ),
             any_type::AnyAssetObjectProperty::Object(_) => {
+                log::warn!("skipping AssetObject property.");
                 return Err(anyhow::anyhow!(
                     "unhandled asset object property (NOT AN ERROR)"
                 ));
@@ -498,10 +506,13 @@ fn resolve_type(prop: UProperty) -> anyhow::Result<Type> {
         AnyProperty::Str(_) => Type::String,
         AnyProperty::Text(_) => Type::Text,
         AnyProperty::Name(_) => Type::Name,
+        // TODO: handle delegates, idk if they are ever useful though.
         AnyProperty::Delegate(_) => {
+            log::warn!("skipping delegate property.");
             return Err(anyhow::anyhow!("skipping delegates for now"));
         }
         AnyProperty::MulticastDelegate(_) => {
+            log::warn!("skipping delegate property.");
            return Err(anyhow::anyhow!("skipping multicast delegates for now"));
         }
         AnyProperty::Enum(enm) => Type::Enum {
@@ -523,6 +534,7 @@ fn resolve_type(prop: UProperty) -> anyhow::Result<Type> {
                 .object_ref(),
             ),
             AnyProperty::Other(_) => {
+                log::warn!("skipping unknown property type.");
                 return Err(anyhow::anyhow!("unhandled property."));
             }
         };