Commit

fix: upstream sync conflicts
refcell committed Feb 26, 2023
2 parents 500df63 + 123917d commit ad791a5
Showing 28 changed files with 2,178 additions and 358 deletions.
235 changes: 33 additions & 202 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -6,6 +6,7 @@ members = [
"huff_utils",
"huff_cli",
"huff_parser",
"huff_js",
"huff_tests"
]
exclude = [ "assets", "huffup", "huff-examples" ]
1 change: 1 addition & 0 deletions README.md
@@ -80,6 +80,7 @@ _Note: Compilation benchmarks were performed on [huff-examples erc20](https://gi
## Modules
* [huff_core](./huff_core): The core module to huff-rs. Resolves source file paths, executes compilation, and exports artifacts.
* [huff_cli](./huff_cli): The command line interface for the Huff compiler.
* [huff_js](./huff_js): A wasm compatible interface to the Huff compiler for JavaScript bindings.
* [huff_lexer](./huff_lexer): Takes in the source of a `.huff` file and generates a vector of `Token`s.
* [huff_parser](./huff_parser): Crafts a `Contract` AST from the vector of `Token`s generated by [huff_lexer](./huff_lexer).
* [huff_codegen](./huff_codegen): EVM Bytecode generation module that accepts an AST generated by [huff_parser](./huff_parser).
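
The new `huff_js` module is described above as a wasm-compatible interface to the compiler for JavaScript bindings. Its real API is not part of this diff; the fragment below is only a hypothetical illustration of what a `wasm_bindgen` entry point of that kind tends to look like, with made-up names.

```rust
// Hypothetical illustration only; the actual huff_js exports are not shown in this diff.
use wasm_bindgen::prelude::*;

#[wasm_bindgen]
pub fn compile(source: &str) -> Result<String, JsValue> {
    // A real binding would run the Huff compiler pipeline and return artifacts;
    // this stub only validates its input so the sketch stays self-contained.
    if source.trim().is_empty() {
        return Err(JsValue::from_str("empty source"));
    }
    Ok(source.to_string())
}
```
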
2 changes: 1 addition & 1 deletion huff_cli/Cargo.toml
@@ -27,4 +27,4 @@ isatty = "0.1.9"
[[bin]]
name = "huffc"
path = "src/huffc.rs"
doc = false
doc = false
10 changes: 7 additions & 3 deletions huff_cli/src/huffc.rs
@@ -15,9 +15,12 @@ use huff_tests::{
prelude::{print_test_report, ReportKind},
HuffTester,
};
use huff_utils::prelude::{
export_interfaces, gen_sol_interfaces, str_to_bytes32, unpack_files, AstSpan, CodegenError,
CodegenErrorKind, CompilerError, FileSource, Literal, OutputLocation, Span,
use huff_utils::{
file_provider::FileSystemFileProvider,
prelude::{
export_interfaces, gen_sol_interfaces, str_to_bytes32, unpack_files, AstSpan, CodegenError,
CodegenErrorKind, CompilerError, FileSource, Literal, OutputLocation, Span,
},
};
use isatty::stdout_isatty;
use spinners::{Spinner, Spinners};
@@ -198,6 +201,7 @@ fn main() {
optimize: cli.optimize,
bytecode: cli.bytecode,
cached: use_cache,
file_provider: Arc::new(FileSystemFileProvider {}),
};

if let Some(TestCommands::Test { format, match_ }) = cli.test {
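
The change above threads a shared `file_provider` into the compiler configuration so source reads go through an abstraction rather than hitting the filesystem directly, which is what makes a wasm build feasible. The actual `huff_utils::file_provider` trait is not shown in this diff; the sketch below is only an assumed shape of such a provider, with hypothetical names and signatures.

```rust
use std::{path::PathBuf, sync::Arc};

// Hypothetical sketch only: the real trait lives in huff_utils::file_provider
// and its exact signatures are not visible in this diff.
pub trait FileProvider: Send + Sync {
    /// Read a source file into a string, or report why it could not be read.
    fn read_file(&self, path: PathBuf) -> Result<String, String>;
}

/// Native implementation backed by the local filesystem.
pub struct FileSystemFileProvider;

impl FileProvider for FileSystemFileProvider {
    fn read_file(&self, path: PathBuf) -> Result<String, String> {
        std::fs::read_to_string(&path).map_err(|e| e.to_string())
    }
}

fn main() {
    // Mirrors `file_provider: Arc::new(FileSystemFileProvider {})` in the CLI config:
    // build one provider and share it across compilation via Arc.
    let provider: Arc<dyn FileProvider> = Arc::new(FileSystemFileProvider);
    let _ = provider.read_file(PathBuf::from("contracts/ERC20.huff"));
}
```
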
3 changes: 1 addition & 2 deletions huff_codegen/Cargo.toml
@@ -13,10 +13,9 @@ keywords = ["huff", "rust", "ethereum", "bytecode", "compiler"]

[dependencies]
serde_json = "1.0.81"
proptest = "1.0.0"
huff_utils = { path = "../huff_utils" }
ethers-core = "1.0.2"
hex = "0.4.3"
tracing = "0.1.34"
uuid = { version = "1.1.1", features = ["v4"] }
regex = "1.6.0"
regex = "1.6.0"
9 changes: 7 additions & 2 deletions huff_core/Cargo.toml
@@ -13,23 +13,28 @@ keywords = ["huff", "rust", "evm", "bytecode", "compiler"]

[dependencies]
serde_json = "1.0.81"
proptest = "1.0.0"
ethers-core = "1.0.2"
tracing = "0.1.34"
huff_codegen = { path = "../huff_codegen" }
huff_lexer = { path = "../huff_lexer" }
huff_utils = { path = "../huff_utils" }
huff_parser = { path = "../huff_parser" }
rayon = { version = "1.5.3" }
tracing-subscriber = { version = "0.3", default-features = false, features = ["env-filter", "fmt"] }
uuid = { version = "1.1.1", features = ["v4"] }
tracing-test = "0.2.2"
walkdir = "2"
cfg-if = "1"

[dev-dependencies]
criterion = "0.3.5"
rand = "0.8.5"

[target.'cfg(not(all(target_arch = "wasm32", target_os = "unknown")))'.dependencies]
rayon = { version = "1.5.3" }

[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
getrandom = { version = "0.2", features = ["js"] }

[[bench]]
name = "huff_benchmark"
harness = false
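
Moving `rayon` under a non-wasm target gate and adding `getrandom` with the `js` feature for `wasm32-unknown-unknown` (together with `cfg-if`) is the usual recipe for keeping a crate compilable on wasm while preserving native parallelism. The snippet below is an illustrative pattern only, not the actual huff_core source:

```rust
// Illustrative only: how cfg-if is commonly used to consume the
// target-gated dependencies declared above. Not the actual huff_core code.
use cfg_if::cfg_if;

cfg_if! {
    if #[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))] {
        // Native builds can fan work out across threads with rayon.
        use rayon::prelude::*;

        pub fn total_source_len(sources: &[String]) -> usize {
            sources.par_iter().map(|s| s.len()).sum()
        }
    } else {
        // wasm32-unknown-unknown builds fall back to a sequential pass;
        // getrandom's "js" feature keeps uuid's v4 generation working there.
        pub fn total_source_len(sources: &[String]) -> usize {
            sources.iter().map(|s| s.len()).sum()
        }
    }
}
```
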
53 changes: 36 additions & 17 deletions huff_core/benches/huff_benchmark.rs
@@ -3,21 +3,24 @@ use huff_codegen::*;
use huff_core::Compiler;
use huff_lexer::*;
use huff_parser::*;
use huff_utils::{files, prelude::*};
use huff_utils::{file_provider::FileSystemFileProvider, files, prelude::*};
use std::{path::PathBuf, sync::Arc};

fn lex_erc20_from_source_benchmark(c: &mut Criterion) {
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(vec![PathBuf::from(
"../huff-examples/erc20/contracts/ERC20.huff".to_string(),
)])
let file_provider = Arc::new(FileSystemFileProvider::new());
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(
vec![PathBuf::from("../huff-examples/erc20/contracts/ERC20.huff".to_string())],
file_provider.clone(),
)
.into_iter()
.map(|p| p.unwrap())
.collect();

// Recurse file deps + generate flattened source
let file_source = file_sources.get(0).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./")).unwrap();
Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
.unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource {
source: &flattened.0,
@@ -35,17 +38,20 @@ }
}

fn parse_erc20_benchmark(c: &mut Criterion) {
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(vec![PathBuf::from(
"../huff-examples/erc20/contracts/ERC20.huff".to_string(),
)])
let file_provider = Arc::new(FileSystemFileProvider::new());
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(
vec![PathBuf::from("../huff-examples/erc20/contracts/ERC20.huff".to_string())],
file_provider.clone(),
)
.into_iter()
.map(|p| p.unwrap())
.collect();

// Recurse file deps + generate flattened source
let file_source = file_sources.get(0).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./")).unwrap();
Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
.unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource {
source: &flattened.0,
@@ -68,17 +74,20 @@ }
}

fn codegen_erc20_benchmark(c: &mut Criterion) {
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(vec![PathBuf::from(
"../huff-examples/erc20/contracts/ERC20.huff".to_string(),
)])
let file_provider = Arc::new(FileSystemFileProvider::new());
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(
vec![PathBuf::from("../huff-examples/erc20/contracts/ERC20.huff".to_string())],
file_provider.clone(),
)
.into_iter()
.map(|p| p.unwrap())
.collect();

// Recurse file deps + generate flattened source
let file_source = file_sources.get(0).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./")).unwrap();
Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
.unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource {
source: &flattened.0,
@@ -113,16 +122,21 @@ fn codegen_erc20_benchmark(c: &mut Criterion) {

fn erc20_compilation_benchmark(c: &mut Criterion) {
c.bench_function("Full ERC-20 compilation", |b| b.iter(|| {
let file_provider = Arc::new(FileSystemFileProvider::new());
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(vec![PathBuf::from(
"../huff-examples/erc20/contracts/ERC20.huff".to_string(),
)])
)], file_provider.clone())
.into_iter()
.map(|p| p.unwrap())
.collect();

// Recurse file deps + generate flattened source
let file_source = file_sources.get(0).unwrap();
let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./")).unwrap();
let recursed_file_source = Compiler::recurse_deps(
Arc::clone(file_source),
&files::Remapper::new("./"),
file_provider
).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource {
source: &flattened.0,
@@ -153,16 +167,21 @@ fn erc20_compilation_benchmark(c: &mut Criterion) {

fn erc721_compilation_benchmark(c: &mut Criterion) {
c.bench_function("Full ERC-721 compilation", |b| b.iter(|| {
let file_provider = Arc::new(FileSystemFileProvider::new());
let file_sources: Vec<Arc<FileSource>> = Compiler::fetch_sources(vec![PathBuf::from(
"../huff-examples/erc721/contracts/ERC721.huff".to_string(),
)])
)], file_provider.clone())
.into_iter()
.map(|p| p.unwrap())
.collect();

// Recurse file deps + generate flattened source
let file_source = file_sources.get(0).unwrap();
let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./")).unwrap();
let recursed_file_source = Compiler::recurse_deps(
Arc::clone(file_source),
&files::Remapper::new("./"),
file_provider
).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource {
source: &flattened.0,