Mirror of https://github.com/fluencelabs/wasm-bindgen, synced 2025-06-16 06:21:22 +00:00.
Migrate all crates to the 2018 edition
Most of the CLI crates were already in the 2018 edition, and it turns out one of the macro crates was too, so we may as well move everything over to 2018! It's always nice to remove those `extern crate` statements nowadays. This commit also runs `cargo fmt --all` to make sure we're conforming with style again.
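To make the pattern in the hunks below easier to follow, here is a minimal sketch of what the 2018-edition migration looks like on a made-up crate (module and item names are illustrative, the `log = "0.4"` dependency is assumed, and none of this code comes from wasm-bindgen itself): `extern crate` declarations disappear, crate-local paths gain an explicit `crate::` prefix, and macros are invoked through their crate path instead of being pulled in with `#[macro_use]`.

```rust
// Hypothetical lib.rs on the 2018 edition. The 2015-edition version of this
// file would have started with:
//
//     #[macro_use]
//     extern crate log;
//
// and referred to sibling modules with bare paths like `use ast;`.

mod ast {
    pub struct Program {
        pub name: String,
    }
}

// Crate-local items now take an explicit `crate::` prefix.
use crate::ast::Program;

pub fn report(program: &Program) {
    // External macros are called by path, so `#[macro_use]` is no longer
    // needed (assumes the caller has installed a `log` backend).
    log::warn!("generating bindings for `{}`", program.name);
}
```

The rest of the diff is this same mechanical rewrite applied across the backend crate, plus the formatting churn from `cargo fmt --all` visible in the later hunks.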
@@ -1,8 +1,8 @@
+use crate::Diagnostic;
 use proc_macro2::{Ident, Span};
-use shared;
-use syn;
-use Diagnostic;
 use std::hash::{Hash, Hasher};
+use syn;
+use wasm_bindgen_shared as shared;
 
 /// An abstract syntax tree representing a rust program. Contains
 /// extra information for joining up this rust code with javascript.

@@ -1,16 +1,14 @@
+use crate::ast;
+use crate::encode;
+use crate::util::ShortHash;
+use crate::Diagnostic;
+use proc_macro2::{Ident, Literal, Span, TokenStream};
+use quote::{quote, ToTokens};
 use std::collections::HashSet;
 use std::sync::atomic::{AtomicUsize, Ordering};
 use std::sync::Mutex;
 
-use proc_macro2::{Ident, Literal, Span, TokenStream};
-use quote::ToTokens;
-use shared;
-use syn;
-
-use ast;
-use encode;
-use util::ShortHash;
-use Diagnostic;
+use wasm_bindgen_shared as shared;
 
 pub trait TryToTokens {
     fn try_to_tokens(&self, tokens: &mut TokenStream) -> Result<(), Diagnostic>;
@@ -114,12 +112,10 @@ impl TryToTokens for ast::Program {
         // automatically rerun rustc which will rerun this macro. Other than
         // this we don't actually need the results of the `include_str!`, so
         // it's just shoved into an anonymous static.
-        let file_dependencies = encoded.included_files
-            .iter()
-            .map(|file| {
-                let file = file.to_str().unwrap();
-                quote! { include_str!(#file) }
-            });
+        let file_dependencies = encoded.included_files.iter().map(|file| {
+            let file = file.to_str().unwrap();
+            quote! { include_str!(#file) }
+        });
 
         (quote! {
             #[allow(non_upper_case_globals)]
@@ -1180,7 +1176,7 @@ impl ToTokens for ast::ImportStatic {
 
 impl ToTokens for ast::Const {
     fn to_tokens(&self, tokens: &mut TokenStream) {
-        use ast::ConstValue::*;
+        use crate::ast::ConstValue::*;
 
         let vis = &self.vis;
         let name = &self.name;
@@ -1405,7 +1401,7 @@ impl<'a, T: ToTokens> ToTokens for Descriptor<'a, T> {
         // It's up to the descriptors themselves to ensure they have unique
         // names for unique items imported, currently done via `ShortHash` and
         // hashing appropriate data into the symbol name.
-        lazy_static! {
+        lazy_static::lazy_static! {
             static ref DESCRIPTORS_EMITTED: Mutex<HashSet<String>> = Default::default();
         }
         if !DESCRIPTORS_EMITTED

@@ -1,4 +1,4 @@
-use ast;
+use crate::ast;
 use proc_macro2::Ident;
 use syn;
 
@@ -355,7 +355,7 @@ impl RemoveUndefinedImports for ast::Program {
             let before = num_required(dictionary);
             changed = dictionary.fields.remove_undefined_imports(is_defined) || changed;
             if before != num_required(dictionary) {
-                warn!(
+                log::warn!(
                     "removing {} due to a required field being removed",
                     dictionary.name
                 );
@@ -384,7 +384,7 @@ where
     x.imported_type_references(&mut |id| {
         if all_defined {
             if !is_defined(id) {
-                info!("removing due to {} not being defined", id);
+                log::info!("removing due to {} not being defined", id);
                 all_defined = false;
             }
         }

@@ -1,13 +1,13 @@
+use crate::util::ShortHash;
 use proc_macro2::{Ident, Span};
-use std::cell::{RefCell, Cell};
+use std::cell::{Cell, RefCell};
 use std::collections::HashMap;
 use std::env;
 use std::fs;
 use std::path::PathBuf;
-use util::ShortHash;
 
-use ast;
-use Diagnostic;
+use crate::ast;
+use crate::Diagnostic;
 
 pub struct EncodeResult {
     pub custom_section: Vec<u8>,
@@ -19,8 +19,17 @@ pub fn encode(program: &ast::Program) -> Result<EncodeResult, Diagnostic> {
     let i = Interner::new();
     shared_program(program, &i)?.encode(&mut e);
     let custom_section = e.finish();
-    let included_files = i.files.borrow().values().map(|p| &p.path).cloned().collect();
-    Ok(EncodeResult { custom_section, included_files })
+    let included_files = i
+        .files
+        .borrow()
+        .values()
+        .map(|p| &p.path)
+        .cloned()
+        .collect();
+    Ok(EncodeResult {
+        custom_section,
+        included_files,
+    })
 }
 
 struct Interner {
@@ -67,16 +76,16 @@ impl Interner {
     fn resolve_import_module(&self, id: &str, span: Span) -> Result<&str, Diagnostic> {
         let mut files = self.files.borrow_mut();
         if let Some(file) = files.get(id) {
-            return Ok(self.intern_str(&file.new_identifier))
+            return Ok(self.intern_str(&file.new_identifier));
         }
         self.check_for_package_json();
         let path = if id.starts_with("/") {
             self.root.join(&id[1..])
         } else if id.starts_with("./") || id.starts_with("../") {
             let msg = "relative module paths aren't supported yet";
-            return Err(Diagnostic::span_error(span, msg))
+            return Err(Diagnostic::span_error(span, msg));
         } else {
-            return Ok(self.intern_str(&id))
+            return Ok(self.intern_str(&id));
         };
 
         // Generate a unique ID which is somewhat readable as well, so mix in
@@ -98,7 +107,7 @@ impl Interner {
 
     fn check_for_package_json(&self) {
         if self.has_package_json.get() {
-            return
+            return;
         }
         let path = self.root.join("package.json");
         if path.exists() {
@@ -139,11 +148,9 @@ fn shared_program<'a>(
         .values()
         .map(|file| {
             fs::read_to_string(&file.path)
-                .map(|s| {
-                    LocalModule {
-                        identifier: intern.intern_str(&file.new_identifier),
-                        contents: intern.intern_str(&s),
-                    }
+                .map(|s| LocalModule {
+                    identifier: intern.intern_str(&file.new_identifier),
+                    contents: intern.intern_str(&s),
                 })
                 .map_err(|e| {
                     let msg = format!("failed to read file `{}`: {}", file.path.display(), e);
@@ -499,4 +506,4 @@ macro_rules! encode_api {
         encode_api!($($rest)*);
     );
 }
-shared_api!(encode_api);
+wasm_bindgen_shared::shared_api!(encode_api);

@@ -2,21 +2,8 @@
 #![cfg_attr(feature = "extra-traits", deny(missing_debug_implementations))]
 #![doc(html_root_url = "https://docs.rs/wasm-bindgen-backend/0.2")]
 
-extern crate bumpalo;
-#[macro_use]
-extern crate log;
-extern crate proc_macro2;
-#[macro_use]
-extern crate quote;
-extern crate syn;
-#[macro_use]
-extern crate lazy_static;
-
-#[macro_use]
-extern crate wasm_bindgen_shared as shared;
-
-pub use codegen::TryToTokens;
-pub use error::Diagnostic;
+pub use crate::codegen::TryToTokens;
+pub use crate::error::Diagnostic;
 
 #[macro_use]
 mod error;

@@ -7,7 +7,7 @@ use std::sync::atomic::AtomicBool;
 use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering::SeqCst;
 
-use ast;
+use crate::ast;
 use proc_macro2::{self, Ident};
 use syn;
 