diff --git a/AUTHORS.txt b/AUTHORS.txt index 83f9bbff8aa6e..9aa09ac176356 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -498,6 +498,7 @@ Robert Irelan Robert Knight Robert Millar Roland Tanglao +Rolf Timmermans Ron Dahlgren Roy Frostig Russell diff --git a/man/rustc.1 b/man/rustc.1 index 6d4f6aa843786..4457ac8cce7e3 100644 --- a/man/rustc.1 +++ b/man/rustc.1 @@ -12,75 +12,54 @@ This program is a compiler for the Rust language, available at .SH OPTIONS .TP -\fB\-\-crate-name NAME\fR -Specify the name of the crate being built -.TP -\fB\-\-crate-type=[bin|lib|dylib|rlib|staticlib]\fR -Configure the flavor of rust crate that is generated (default `bin`) +\fB\-h\fR, \fB\-\-help\fR +Display the help message .TP \fB\-\-cfg\fR SPEC Configure the compilation environment .TP -\fB\-\-emit=[asm,ir,bc,obj,link]\fR -Configure the output that rustc will produce -.TP -\fB\-h\fR, \fB\-\-help\fR -Display this message -.TP \fB\-L\fR PATH Add a directory to the library search path .TP -\fB\-\-no\-trans\fR -Run all passes except translation; no output +\fB\-l\fR NAME[:KIND] +Link the generated crate(s) to the specified native library NAME. The optional +KIND can be one of: static, dylib, or framework. If omitted, dylib is assumed. .TP -\fB\-\-no\-analysis\fR -Parse and expand the source, but run no analysis and produce no output +\fB\-\-crate-type\fR [bin|lib|rlib|dylib|staticlib] +Comma separated list of types of crates for the compiler to emit .TP -\fB\-g\fR -Emit DWARF debug information into object files generated. +\fB\-\-crate-name NAME\fR +Specify the name of the crate being built .TP -\fB\-\-debuginfo\fR LEVEL -Emit DWARF debug info to the objects created: 0 = no debug info, 1 = -line-tables only (for stacktraces and breakpoints), 2 = full debug -info with variable and type information (same as -g). +\fB\-\-emit\fR [asm|llvm-bc|llvm-ir|obj|link|dep-info] +Configure the output that rustc will produce +.TP +\fB\-\-print\fR [crate-name|output-file-names|sysroot] +Comma separated list of compiler information to print on stdout +.TP +\fB\-g\fR +Equivalent to \fI\-C\fR debuginfo=2 .TP \fB\-O\fR -Equivalent to \fI\-\-opt\-level=2\fR +Equivalent to \fI\-C\fR opt-level=2 .TP \fB\-o\fR FILENAME -Write output to <filename>. Ignored if more than one --emit is specified. -.TP -\fB\-\-opt\-level\fR LEVEL -Optimize with possible levels 0-3 +Write output to <filename>. Ignored if multiple \fI\-\-emit\fR outputs are +specified. .TP \fB\-\-out\-dir\fR DIR -Write output to compiler-chosen filename in <dir>. Ignored if -o is specified. -(default the current directory) -.TP -\fB\-\-parse\-only\fR -Parse only; do not compile, assemble, or link +Write output to compiler-chosen filename in <dir>. Ignored if \fI\-o\fR is +specified. Defaults to the current directory. .TP -\fB\-\-pretty\fR [TYPE] -Pretty-print the input instead of compiling; valid types are: normal -(un-annotated source), expanded (crates expanded), typed (crates -expanded, with type annotations), identified (fully parenthesized, -AST nodes and blocks with IDs), or flowgraph=<nodeid> (graphviz -formatted flowgraph for node) -.TP -\fB\-\-dep-info\fR [FILENAME] -Output dependency info to <filename> after compiling, in a format suitable -for use by Makefiles.
-.TP -\fB\-\-sysroot\fR PATH -Override the system root +\fB\-\-explain\fR OPT +Provide a detailed explanation of an error message .TP \fB\-\-test\fR Build a test harness .TP \fB\-\-target\fR TRIPLE -Target triple cpu-manufacturer-kernel[-os] to compile for (see -http://sources.redhat.com/autobook/autobook/autobook_17.html -for details) +Target triple cpu-manufacturer-kernel[-os] to compile for (see chapter 3.4 of +http://www.sourceware.org/autobook/ for details) .TP \fB\-W\fR help Print 'lint' options and default settings @@ -97,15 +76,30 @@ Set lint denied \fB\-F\fR OPT, \fB\-\-forbid\fR OPT Set lint forbidden .TP -\fB\-Z\fR FLAG -Set internal debugging options. Use "-Z help" to print available options. -.TP \fB\-C\fR FLAG[=VAL], \fB\-\-codegen\fR FLAG[=VAL] Set a codegen-related flag to the value specified. Use "-C help" to print available flags. See CODEGEN OPTIONS below .TP -\fB\-v\fR, \fB\-\-version\fR +\fB\-V\fR, \fB\-\-version\fR Print version info and exit +.TP +\fB\-v\fR, \fB\-\-verbose\fR +Use verbose output +.TP +\fB\-\-extern\fR NAME=PATH +Specify where an external rust library is located +.TP +\fB\-\-sysroot\fR PATH +Override the system root +.TP +\fB\-Z\fR FLAG +Set internal debugging options. Use "-Z help" to print available options. +.TP +\fB\-\-color\fR auto|always|never +Configure coloring of output: + auto = colorize, if output goes to a tty (default); + always = always colorize output; + never = never colorize output .SH CODEGEN OPTIONS @@ -121,6 +115,9 @@ objects. A space-separated list of extra arguments to pass to the linker when the linker is invoked. .TP +\fBlto\fR +Perform LLVM link-time optimizations. +.TP \fBtarget-cpu\fR=help Selects a target processor. If the value is 'help', then a list of available CPUs is printed. @@ -167,8 +164,38 @@ Prefers dynamic linking to static linking. \fBno-integrated-as\fR Force usage of an external assembler rather than LLVM's integrated one. .TP +\fBno-redzone\fR +Disable the use of the redzone. +.TP \fBrelocation-model\fR=[pic,static,dynamic-no-pic] -The relocation model to use. (default: pic) +The relocation model to use. (Default: pic) +.TP +\fBcode-model\fR=[small,kernel,medium,large] +Choose the code model to use. +.TP +\fBmetadata\fR=val +Metadata to mangle symbol names with. +.TP +\fBextra-filename\fR=val +Extra data to put in each output filename. +.TP +\fBcodegen-units\fR=val +Divide crate into N units to optimize in parallel. +.TP +\fBremark\fR=val +Print remarks for these optimization passes (space separated, or "all"). +.TP +\fBno-stack-check\fR +Disable checks for stack exhaustion (a memory-safety hazard!). +.TP +\fBdebuginfo\fR=val +Debug info emission level: + 0 = no debug info; + 1 = line-tables only (for stacktraces and breakpoints); + 2 = full debug info with variable and type information. 
+.TP +\fBopt-level\fR=val +Optimize with possible levels 0-3 .SH "EXAMPLES" To build an executable from a source file with a main function: diff --git a/mk/crates.mk b/mk/crates.mk index fafe77c78da4e..e20cb06e3a8ca 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -53,7 +53,8 @@ TARGET_CRATES := libc std flate arena term \ serialize getopts collections test time rand \ log regex graphviz core rbml alloc \ unicode -RUSTC_CRATES := rustc rustc_typeck rustc_borrowck rustc_driver rustc_trans rustc_back rustc_llvm +RUSTC_CRATES := rustc rustc_typeck rustc_borrowck rustc_resolve rustc_driver \ + rustc_trans rustc_back rustc_llvm HOST_CRATES := syntax $(RUSTC_CRATES) rustdoc regex_macros fmt_macros CRATES := $(TARGET_CRATES) $(HOST_CRATES) TOOLS := compiletest rustdoc rustc @@ -67,11 +68,12 @@ DEPS_std := core libc rand alloc collections unicode \ DEPS_graphviz := std DEPS_syntax := std term serialize log fmt_macros arena libc DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \ - rustc_typeck log syntax serialize rustc_llvm rustc_trans + rustc_typeck rustc_resolve log syntax serialize rustc_llvm rustc_trans DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \ log syntax serialize rustc_llvm DEPS_rustc_typeck := rustc syntax DEPS_rustc_borrowck := rustc log graphviz syntax +DEPS_rustc_resolve := rustc log syntax DEPS_rustc := syntax flate arena serialize getopts rbml \ time log graphviz rustc_llvm rustc_back DEPS_rustc_llvm := native:rustllvm libc std @@ -118,9 +120,11 @@ DOC_CRATES := $(filter-out rustc, \ $(filter-out rustc_trans, \ $(filter-out rustc_typeck, \ $(filter-out rustc_borrowck, \ + $(filter-out rustc_resolve, \ $(filter-out rustc_driver, \ - $(filter-out syntax, $(CRATES))))))) -COMPILER_DOC_CRATES := rustc rustc_trans rustc_borrowck rustc_typeck rustc_driver syntax + $(filter-out syntax, $(CRATES)))))))) +COMPILER_DOC_CRATES := rustc rustc_trans rustc_borrowck rustc_resolve \ + rustc_typeck rustc_driver syntax # This macro creates some simple definitions for each crate being built, just # some munging of all of the parameters above. diff --git a/mk/docs.mk b/mk/docs.mk index 6d1a3bfa7a326..9a924916ec861 100644 --- a/mk/docs.mk +++ b/mk/docs.mk @@ -216,36 +216,6 @@ endef $(foreach docname,$(DOCS),$(eval $(call DEF_DOC,$(docname)))) -# Localized documentation - -# FIXME: I (huonw) haven't actually been able to test properly, since -# e.g. (by default) I'm doing an out-of-tree build (#12763), but even -# adjusting for that, the files are too old(?) and are rejected by -# po4a. -# -# As such, I've attempted to get it working as much as possible (and -# switching from pandoc to rustdoc), but preserving the old behaviour -# (e.g. 
only running on the guide) -.PHONY: l10n-mds -l10n-mds: $(D)/po4a.conf \ - $(foreach lang,$(L10N_LANG),$(D)/po/$(lang)/*.md.po) - $(warning WARNING: localized documentation is experimental) - po4a --copyright-holder="The Rust Project Developers" \ - --package-name="Rust" \ - --package-version="$(CFG_RELEASE)" \ - -M UTF-8 -L UTF-8 \ - $(D)/po4a.conf - -define DEF_L10N_DOC -DOC_L10N_TARGETS += doc/l10n/$(1)/$(2).html -doc/l10n/$(1)/$(2).html: l10n-mds $$(HTML_DEPS) $$(RUSTDOC_DEPS_$(2)) - @$$(call E, rustdoc: $$@) - $$(RUSTDOC) $$(RUSTDOC_HTML_OPTS) $$(RUSTDOC_FLAGS_$(1)) doc/l10n/$(1)/$(2).md -endef - -$(foreach lang,$(L10N_LANGS),$(eval $(call DEF_L10N_DOC,$(lang),guide))) - - ###################################################################### # Rustdoc (libstd/extra) ###################################################################### @@ -294,7 +264,3 @@ endif docs: $(DOC_TARGETS) compiler-docs: $(COMPILER_DOC_TARGETS) - -docs-l10n: $(DOC_L10N_TARGETS) - -.PHONY: docs-l10n diff --git a/mk/rt.mk b/mk/rt.mk index 38aec83631605..a1d18aae1b459 100644 --- a/mk/rt.mk +++ b/mk/rt.mk @@ -35,7 +35,7 @@ # that's per-target so you're allowed to conditionally add files based on the # target. ################################################################################ -NATIVE_LIBS := rust_builtin hoedown morestack miniz context_switch \ +NATIVE_LIBS := rust_builtin hoedown morestack miniz \ rustrt_native rust_test_helpers # $(1) is the target triple @@ -58,8 +58,7 @@ NATIVE_DEPS_rustrt_native_$(1) := \ arch/$$(HOST_$(1))/record_sp.S NATIVE_DEPS_rust_test_helpers_$(1) := rust_test_helpers.c NATIVE_DEPS_morestack_$(1) := arch/$$(HOST_$(1))/morestack.S -NATIVE_DEPS_context_switch_$(1) := \ - arch/$$(HOST_$(1))/_context.S + ################################################################################ # You shouldn't find it that necessary to edit anything below this line. diff --git a/mk/tests.mk b/mk/tests.mk index 3340f9b4969ea..1a122572e434c 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -21,7 +21,8 @@ $(eval $(call RUST_CRATE,coretest)) TEST_TARGET_CRATES = $(filter-out core unicode,$(TARGET_CRATES)) coretest TEST_DOC_CRATES = $(DOC_CRATES) -TEST_HOST_CRATES = $(filter-out rustc_typeck rustc_borrowck rustc_trans,$(HOST_CRATES)) +TEST_HOST_CRATES = $(filter-out rustc_typeck rustc_borrowck rustc_resolve rustc_trans,\ + $(HOST_CRATES)) TEST_CRATES = $(TEST_TARGET_CRATES) $(TEST_HOST_CRATES) ###################################################################### @@ -73,21 +74,6 @@ endif TEST_LOG_FILE=tmp/check-stage$(1)-T-$(2)-H-$(3)-$(4).log TEST_OK_FILE=tmp/check-stage$(1)-T-$(2)-H-$(3)-$(4).ok -TEST_RATCHET_FILE=tmp/check-stage$(1)-T-$(2)-H-$(3)-$(4)-metrics.json -TEST_RATCHET_NOISE_PERCENT=10.0 - -# Whether to ratchet or merely save benchmarks -ifdef CFG_RATCHET_BENCH -CRATE_TEST_EXTRA_ARGS= \ - --test $(TEST_BENCH) \ - --ratchet-metrics $(call TEST_RATCHET_FILE,$(1),$(2),$(3),$(4)) \ - --ratchet-noise-percent $(TEST_RATCHET_NOISE_PERCENT) -else -CRATE_TEST_EXTRA_ARGS= \ - --test $(TEST_BENCH) \ - --save-metrics $(call TEST_RATCHET_FILE,$(1),$(2),$(3),$(4)) -endif - # If we're sharding the testsuite between parallel testers, # pass this argument along to the compiletest and crate test # invocations. 
@@ -454,7 +440,6 @@ $$(call TEST_OK_FILE,$(1),$(2),$(3),$(4)): \ $$(Q)touch tmp/check-stage$(1)-T-$(2)-H-$(3)-$(4).log $$(Q)$(CFG_ADB) pull $(CFG_ADB_TEST_DIR)/check-stage$(1)-T-$(2)-H-$(3)-$(4).log tmp/ $$(Q)$(CFG_ADB) shell rm $(CFG_ADB_TEST_DIR)/check-stage$(1)-T-$(2)-H-$(3)-$(4).log - $$(Q)$(CFG_ADB) pull $(CFG_ADB_TEST_DIR)/$$(call TEST_RATCHET_FILE,$(1),$(2),$(3),$(4)) tmp/ @if grep -q "result: ok" tmp/check-stage$(1)-T-$(2)-H-$(3)-$(4).tmp; \ then \ rm tmp/check-stage$(1)-T-$(2)-H-$(3)-$(4).tmp; \ @@ -696,7 +681,6 @@ CTEST_ARGS$(1)-T-$(2)-H-$(3)-$(4) := \ $$(CTEST_COMMON_ARGS$(1)-T-$(2)-H-$(3)) \ --src-base $$(S)src/test/$$(CTEST_SRC_BASE_$(4))/ \ --build-base $(3)/test/$$(CTEST_BUILD_BASE_$(4))/ \ - --ratchet-metrics $(call TEST_RATCHET_FILE,$(1),$(2),$(3),$(4)) \ --mode $$(CTEST_MODE_$(4)) \ $$(CTEST_RUNTOOL_$(4)) diff --git a/src/compiletest/compiletest.rs b/src/compiletest/compiletest.rs index 59be0152d5876..bdbfbfd7c89d6 100644 --- a/src/compiletest/compiletest.rs +++ b/src/compiletest/compiletest.rs @@ -152,7 +152,7 @@ pub fn parse_config(args: Vec<String> ) -> Config { matches.opt_str("ratchet-metrics").map(|s| Path::new(s)), ratchet_noise_percent: matches.opt_str("ratchet-noise-percent") - .and_then(|s| from_str::<f64>(s.as_slice())), + .and_then(|s| s.as_slice().parse::<f64>()), runtool: matches.opt_str("runtool"), host_rustcflags: matches.opt_str("host-rustcflags"), target_rustcflags: matches.opt_str("target-rustcflags"), @@ -190,9 +190,7 @@ pub fn log_config(config: &Config) { logv(c, format!("filter: {}", opt_str(&config.filter .as_ref() - .map(|re| { - re.to_string().into_string() - })))); + .map(|re| re.to_string())))); logv(c, format!("runtool: {}", opt_str(&config.runtool))); logv(c, format!("host-rustcflags: {}", opt_str(&config.host_rustcflags))); diff --git a/src/compiletest/header.rs b/src/compiletest/header.rs index 60ef76528e849..27be6c6d83568 100644 --- a/src/compiletest/header.rs +++ b/src/compiletest/header.rs @@ -351,8 +351,8 @@ pub fn gdb_version_to_int(version_string: &str) -> int { panic!("{}", error_string); } - let major: int = from_str(components[0]).expect(error_string); - let minor: int = from_str(components[1]).expect(error_string); + let major: int = components[0].parse().expect(error_string); + let minor: int = components[1].parse().expect(error_string); return major * 1000 + minor; } @@ -362,6 +362,6 @@ pub fn lldb_version_to_int(version_string: &str) -> int { "Encountered LLDB version string with unexpected format: {}", version_string); let error_string = error_string.as_slice(); - let major: int = from_str(version_string).expect(error_string); + let major: int = version_string.parse().expect(error_string); return major; } diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index 1b445a6e7367f..bf72250c4705a 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -1361,7 +1361,7 @@ fn split_maybe_args(argstr: &Option<String>) -> Vec<String> { s.as_slice() .split(' ') .filter_map(|s| { - if s.is_whitespace() { + if s.chars().all(|c| c.is_whitespace()) { None } else { Some(s.to_string()) @@ -1609,7 +1609,7 @@ fn _arm_exec_compiled_test(config: &Config, stderr_out.as_slice()); ProcRes { - status: process::ExitStatus(exitcode), + status: process::ProcessExit::ExitStatus(exitcode), stdout: stdout_out, stderr: stderr_out, cmdline: cmdline @@ -1666,7 +1666,7 @@ fn compile_test_and_save_bitcode(config: &Config, props: &TestProps, // FIXME (#9639): This needs to handle non-utf8 paths let mut link_args = vec!("-L".to_string(),
aux_dir.as_str().unwrap().to_string()); - let llvm_args = vec!("--emit=bc,obj".to_string(), + let llvm_args = vec!("--emit=llvm-bc,obj".to_string(), "--crate-type=lib".to_string()); link_args.extend(llvm_args.into_iter()); let args = make_compile_args(config, diff --git a/src/doc/README.md b/src/doc/README.md index 50222973509ad..3b12ffe8f11e1 100644 --- a/src/doc/README.md +++ b/src/doc/README.md @@ -6,12 +6,6 @@ document converter, is required to generate docs as HTML from Rust's source code. -[po4a](http://po4a.alioth.debian.org/) is required for generating translated -docs from the master (English) docs. - -[GNU gettext](http://www.gnu.org/software/gettext/) is required for managing -the translation data. - ## Building To generate all the docs, just run `make docs` from the root of the repository. @@ -44,43 +38,3 @@ The syntax for pandoc flavored markdown can be found at: A nice quick reference (for non-pandoc markdown) is at: - http://kramdown.gettalong.org/quickref.html - -## Notes for translators - -Notice: The procedure described below is a work in progress. We are working on -translation system but the procedure contains some manual operations for now. - -To start the translation for a new language, see `po4a.conf` at first. - -To generate `.pot` and `.po` files, do something like: - -~~~~ -po4a --copyright-holder="The Rust Project Developers" \ - --package-name="Rust" \ - --package-version="0.13.0" \ - -M UTF-8 -L UTF-8 \ - src/doc/po4a.conf -~~~~ - -(the version number must be changed if it is not `0.13.0` now.) - -Now you can translate documents with `.po` files, commonly used with gettext. If -you are not familiar with gettext-based translation, please read the online -manual linked from http://www.gnu.org/software/gettext/ . We use UTF-8 as the -file encoding of `.po` files. - -When you want to make a commit, do the command below before staging your -change: - -~~~~ -for f in src/doc/po/**/*.po; do - msgattrib --translated $f -o $f.strip - if [ -e $f.strip ]; then - mv $f.strip $f - else - rm $f - fi -done -~~~~ - -This removes untranslated entries from `.po` files to save disk space. diff --git a/src/doc/guide-ownership.md b/src/doc/guide-ownership.md index 1a46970414371..bf750ecaa8f67 100644 --- a/src/doc/guide-ownership.md +++ b/src/doc/guide-ownership.md @@ -324,7 +324,7 @@ fn main() { let f = Foo { x: y }; // -+ f goes into scope // stuff // | // | -} // -+ f & y go out of scope +} // -+ f and y go out of scope ``` Our `f` lives within the scope of `y`, so everything works. What if it didn't? @@ -342,7 +342,7 @@ fn main() { let y = &5i; // ---+ y goes into scope let f = Foo { x: y }; // ---+ f goes into scope x = &f.x; // | | error here - } // ---+ f & y go out of scope + } // ---+ f and y go out of scope // | println!("{}", x); // | } // -+ x goes out of scope @@ -395,7 +395,7 @@ struct Wheel { } fn main() { - let car = Car { name: "DeLorian".to_string() }; + let car = Car { name: "DeLorean".to_string() }; for _ in range(0u, 4) { Wheel { size: 360, owner: car }; @@ -431,7 +431,7 @@ struct Wheel { } fn main() { - let car = Car { name: "DeLorian".to_string() }; + let car = Car { name: "DeLorean".to_string() }; let car_owner = Rc::new(car); diff --git a/src/doc/guide.md b/src/doc/guide.md index d739ad105fc87..22cbd18a86520 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -31,7 +31,6 @@ below.) 
If you're on Windows, please download either the [32-bit installer](https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe) - or the [64-bit installer](https://static.rust-lang.org/dist/rust-nightly-x86_64-pc-windows-gnu.exe) and run it. @@ -1011,8 +1010,8 @@ in the original declaration. Finally, because fields have names, we can access the field through dot notation: `origin.x`. -The values in structs are immutable, like other bindings in Rust. However, you -can use `mut` to make them mutable: +The values in structs are immutable by default, like other bindings in Rust. +Use `mut` to make them mutable: ```{rust} struct Point { @@ -1170,7 +1169,7 @@ enum StringResult { ErrorReason(String), } ``` -Where a `StringResult` is either an `StringOK`, with the result of a computation, or an +Where a `StringResult` is either a `StringOK`, with the result of a computation, or an `ErrorReason` with a `String` explaining what caused the computation to fail. These kinds of `enum`s are actually very useful and are even part of the standard library. @@ -1192,7 +1191,7 @@ fn respond(greeting: &str) -> StringResult { ``` Notice that we need both the enum name and the variant name: `StringResult::StringOK`, but -we didn't need to with `Ordering`, we just said `Greater` rather than `Ordering::Greater`. +we didn't need to with `Ordering` – we just said `Greater` rather than `Ordering::Greater`. There's a reason: the Rust prelude imports the variants of `Ordering` as well as the enum itself. We can use the `use` keyword to do something similar with `StringResult`: @@ -1223,16 +1222,16 @@ now, rather than the full `StringResult::StringOK`. Importing variants can be co also cause name conflicts, so do this with caution. It's considered good style to rarely import variants for this reason. -As you can see `enum`s with values are quite a powerful tool for data representation, -and can be even more useful when they're generic across types. But before we get to -generics, let's talk about how to use them with pattern matching, a tool that will +As you can see, `enum`s with values are quite a powerful tool for data representation, +and can be even more useful when they're generic across types. Before we get to generics, +though, let's talk about how to use them with pattern matching, a tool that will let us deconstruct this sum type (the type theory term for enums) in a very elegant way and avoid all these messy `if`/`else`s. # Match Often, a simple `if`/`else` isn't enough, because you have more than two -possible options. And `else` conditions can get incredibly complicated. So +possible options. Also, `else` conditions can get incredibly complicated, so what's the solution? Rust has a keyword, `match`, that allows you to replace complicated `if`/`else` @@ -1251,13 +1250,13 @@ match x { } ``` -`match` takes an expression, and then branches based on its value. Each 'arm' of +`match` takes an expression and then branches based on its value. Each 'arm' of the branch is of the form `val => expression`. When the value matches, that arm's expression will be evaluated. It's called `match` because of the term 'pattern -matching,' which `match` is an implementation of. +matching', which `match` is an implementation of. So what's the big advantage here? Well, there are a few. First of all, `match` -enforces 'exhaustiveness checking.' Do you see that last arm, the one with the +enforces 'exhaustiveness checking'. Do you see that last arm, the one with the underscore (`_`)? 
If we remove that arm, Rust will give us an error: ```text @@ -1265,11 +1264,11 @@ error: non-exhaustive patterns: `_` not covered ``` In other words, Rust is trying to tell us we forgot a value. Because `x` is an -integer, Rust knows that it can have a number of different values. For example, -`6i`. But without the `_`, there is no arm that could match, and so Rust refuses -to compile. `_` is sort of like a catch-all arm. If none of the other arms match, -the arm with `_` will. And since we have this catch-all arm, we now have an arm -for every possible value of `x`, and so our program will now compile. +integer, Rust knows that it can have a number of different values – for example, +`6i`. Without the `_`, however, there is no arm that could match, and so Rust refuses +to compile. `_` acts like a 'catch-all arm'. If none of the other arms match, +the arm with `_` will, and since we have this catch-all arm, we now have an arm +for every possible value of `x`, and so our program will compile successfully. `match` statements also destructure enums, as well. Remember this code from the section on enums? @@ -1350,14 +1349,14 @@ fn main() { ``` That is how you can get and use the values contained in `enum`s. -It can also allow us to treat errors or unexpected computations, for example, a -function that is not guaranteed to be able to compute a result (an `int` here), +It can also allow us to handle errors or unexpected computations; for example, a +function that is not guaranteed to be able to compute a result (an `int` here) could return an `OptionalInt`, and we would handle that value with a `match`. As you can see, `enum` and `match` used together are quite useful! -`match` is also an expression, which means we can use it on the right -hand side of a `let` binding or directly where an expression is -used. We could also implement the previous line like this: +`match` is also an expression, which means we can use it on the right-hand +side of a `let` binding or directly where an expression is used. We could +also implement the previous line like this: ```{rust} fn cmp(a: int, b: int) -> Ordering { @@ -1389,7 +1388,7 @@ two main looping constructs: `for` and `while`. The `for` loop is used to loop a particular number of times. Rust's `for` loops work a bit differently than in other systems languages, however. Rust's `for` -loop doesn't look like this "C style" `for` loop: +loop doesn't look like this "C-style" `for` loop: ```{c} for (x = 0; x < 10; x++) { @@ -1424,7 +1423,7 @@ In our example, `range` is a function that takes a start and an end position, and gives an iterator over those values. The upper bound is exclusive, though, so our loop will print `0` through `9`, not `10`. -Rust does not have the "C style" `for` loop on purpose. Manually controlling +Rust does not have the "C-style" `for` loop on purpose. Manually controlling each element of the loop is complicated and error prone, even for experienced C developers. @@ -1455,7 +1454,7 @@ If you need an infinite loop, you may be tempted to write this: while true { ``` -Rust has a dedicated keyword, `loop`, to handle this case: +However, Rust has a dedicated keyword, `loop`, to handle this case: ```{rust,ignore} loop { @@ -1465,7 +1464,7 @@ Rust's control-flow analysis treats this construct differently than a `while true`, since we know that it will always loop. 
The details of what that _means_ aren't super important to understand at this stage, but in general, the more information we can give to the compiler, the better it -can do with safety and code generation. So you should always prefer +can do with safety and code generation, so you should always prefer `loop` when you plan to loop infinitely. ## Ending iteration early @@ -1484,7 +1483,7 @@ while !done { ``` We had to keep a dedicated `mut` boolean variable binding, `done`, to know -when we should skip out of the loop. Rust has two keywords to help us with +when we should exit out of the loop. Rust has two keywords to help us with modifying iteration: `break` and `continue`. In this case, we can write the loop in a better way with `break`: @@ -1499,10 +1498,10 @@ loop { } ``` -We now loop forever with `loop`, and use `break` to break out early. +We now loop forever with `loop` and use `break` to break out early. `continue` is similar, but instead of ending the loop, goes to the next -iteration: This will only print the odd numbers: +iteration. This will only print the odd numbers: ```{rust} for x in range(0i, 10i) { @@ -1519,8 +1518,8 @@ Both `continue` and `break` are valid in both kinds of loops. Strings are an important concept for any programmer to master. Rust's string handling system is a bit different from other languages, due to its systems focus. Any time you have a data structure of variable size, things can get -tricky, and strings are a re-sizable data structure. That said, Rust's strings -also work differently than in some other systems languages, such as C. +tricky, and strings are a re-sizable data structure. That being said, Rust's +strings also work differently than in some other systems languages, such as C. Let's dig into the details. A **string** is a sequence of Unicode scalar values encoded as a stream of UTF-8 bytes. All strings are guaranteed to be @@ -1793,22 +1792,22 @@ fn main() { } ``` -We had to match each time, to see if we had a value or not. In this case, -though, we _know_ that `x` has a `Value`. But `match` forces us to handle +We had to match each time to see if we had a value or not. In this case, +though, we _know_ that `x` has a `Value`, but `match` forces us to handle the `missing` case. This is what we want 99% of the time, but sometimes, we know better than the compiler. Likewise, `read_line()` does not return a line of input. It _might_ return a -line of input. It might also fail to do so. This could happen if our program +line of input, though it might also fail to do so. This could happen if our program isn't running in a terminal, but as part of a cron job, or some other context where there's no standard input. Because of this, `read_line` returns a type very similar to our `OptionalInt`: an `IoResult`. We haven't talked about `IoResult` yet because it is the **generic** form of our `OptionalInt`. -Until then, you can think of it as being the same thing, just for any type, not -just `int`s. +Until then, you can think of it as being the same thing, just for any type – +not just `int`s. Rust provides a method on these `IoResult`s called `ok()`, which does the -same thing as our `match` statement, but assuming that we have a valid value. +same thing as our `match` statement but assumes that we have a valid value. We then call `expect()` on the result, which will terminate our program if we don't have a valid value. In this case, if we can't get input, our program doesn't work, so we're okay with that. 
In most cases, we would want to handle @@ -1852,7 +1851,7 @@ fn main() { } ``` -Sometimes, this makes things more readable. Sometimes, less. Use your judgment +Sometimes, this makes things more readable – sometimes, less. Use your judgement here. That's all you need to get basic input from the standard input! It's not too @@ -1972,10 +1971,8 @@ You can find that page [here](std/index.html). There's a lot of information on that page, but the best part is the search bar. Right up at the top, there's a box that you can enter in a search term. The search is pretty primitive right now, but is getting better all the time. If you type 'random' in that -box, the page will update to [this -one](std/index.html?search=random). The very first -result is a link to -[std::rand::random](std/rand/fn.random.html). If we +box, the page will update to [this one](std/index.html?search=random). The very +first result is a link to [`std::rand::random`](std/rand/fn.random.html). If we click on that result, we'll be taken to its documentation page. This page shows us a few things: the type signature of the function, some @@ -2039,7 +2036,7 @@ rand::random::(); ``` This says "please give me a random `int` value." We can change our code to use -this hint... +this hint: ```{rust,no_run} use std::io; @@ -2260,10 +2257,10 @@ a function for that: let input = io::stdin().read_line() .ok() .expect("Failed to read line"); -let input_num: Option = from_str(input.as_slice()); +let input_num: Option = input.parse(); ``` -The `from_str` function takes in a `&str` value and converts it into something. +The `parse` function takes in a `&str` value and converts it into something. We tell it what kind of something with a type hint. Remember our type hint with `random()`? It looked like this: @@ -2282,8 +2279,8 @@ In this case, we say `x` is a `uint` explicitly, so Rust is able to properly tell `random()` what to generate. In a similar fashion, both of these work: ```{rust,ignore} -let input_num = from_str::("5"); // input_num: Option -let input_num: Option = from_str("5"); // input_num: Option +let input_num = "5".parse::(); // input_num: Option +let input_num: Option = "5".parse(); // input_num: Option ``` Anyway, with us now converting our input to a number, our code looks like this: @@ -2304,7 +2301,7 @@ fn main() { let input = io::stdin().read_line() .ok() .expect("Failed to read line"); - let input_num: Option = from_str(input.as_slice()); + let input_num: Option = input.parse(); println!("You guessed: {}", input_num); @@ -2353,7 +2350,7 @@ fn main() { let input = io::stdin().read_line() .ok() .expect("Failed to read line"); - let input_num: Option = from_str(input.as_slice()); + let input_num: Option = input.parse(); let num = match input_num { Some(num) => num, @@ -2380,7 +2377,7 @@ fn cmp(a: uint, b: uint) -> Ordering { } ``` -We use a `match` to either give us the `uint` inside of the `Option`, or we +We use a `match` to either give us the `uint` inside of the `Option`, or else print an error message and return. Let's give this a shot: ```bash @@ -2398,8 +2395,8 @@ Uh, what? But we did! ... actually, we didn't. See, when you get a line of input from `stdin()`, you get all the input. Including the `\n` character from you pressing Enter. -So, `from_str()` sees the string `"5\n"` and says "nope, that's not a number, -there's non-number stuff in there!" Luckily for us, `&str`s have an easy +Therefore, `parse()` sees the string `"5\n"` and says "nope, that's not a +number; there's non-number stuff in there!" 
Luckily for us, `&str`s have an easy method we can use defined on them: `trim()`. One small modification, and our code looks like this: @@ -2419,7 +2416,7 @@ fn main() { let input = io::stdin().read_line() .ok() .expect("Failed to read line"); - let input_num: Option = from_str(input.as_slice().trim()); + let input_num: Option = input.trim().parse(); let num = match input_num { Some(num) => num, @@ -2465,7 +2462,7 @@ out that I guessed 76. Run the program a few times, and verify that guessing the number works, as well as guessing a number too small. The Rust compiler helped us out quite a bit there! This technique is called -"lean on the compiler," and it's often useful when working on some code. Let +"lean on the compiler", and it's often useful when working on some code. Let the error messages help guide you towards the correct types. Now we've got most of the game working, but we can only make one guess. Let's @@ -2473,8 +2470,8 @@ change that by adding loops! ## Looping -As we already discussed, the `loop` keyword gives us an infinite loop. So -let's add that in: +As we already discussed, the `loop` keyword gives us an infinite loop. +Let's add that in: ```{rust,no_run} use std::io; @@ -2494,7 +2491,7 @@ fn main() { let input = io::stdin().read_line() .ok() .expect("Failed to read line"); - let input_num: Option = from_str(input.as_slice().trim()); + let input_num: Option = input.trim().parse(); let num = match input_num { Some(num) => num, @@ -2569,7 +2566,7 @@ fn main() { let input = io::stdin().read_line() .ok() .expect("Failed to read line"); - let input_num: Option = from_str(input.as_slice().trim()); + let input_num: Option = input.trim().parse(); let num = match input_num { Some(num) => num, @@ -2624,7 +2621,7 @@ fn main() { let input = io::stdin().read_line() .ok() .expect("Failed to read line"); - let input_num: Option = from_str(input.as_slice().trim()); + let input_num: Option = input.trim().parse(); let num = match input_num { Some(num) => num, @@ -2700,7 +2697,7 @@ fn main() { let input = io::stdin().read_line() .ok() .expect("Failed to read line"); - let input_num: Option = from_str(input.as_slice().trim()); + let input_num: Option = input.trim().parse(); let num = match input_num { Some(num) => num, @@ -2780,12 +2777,11 @@ $ cargo run Hello, world! ``` -Excellent! So, we already have a single crate here: our `src/main.rs` is a crate. +Excellent! We already have a single crate here: our `src/main.rs` is a crate. Everything in that file is in the crate root. A crate that generates an executable defines a `main` function inside its root, as we've done here. -Let's define a new module inside our crate. Edit `src/main.rs` to look -like this: +Let's define a new module inside our crate. Edit `src/main.rs` to look like this: ``` fn main() { @@ -2803,7 +2799,7 @@ We now have a module named `hello` inside of our crate root. Modules use `snake_case` naming, like functions and variable bindings. Inside the `hello` module, we've defined a `print_hello` function. This will -also print out our hello world message. Modules allow you to split up your +also print out our "hello world" message. Modules allow you to split up your program into nice neat boxes of functionality, grouping common things together, and keeping different things apart. It's kinda like having a set of shelves: a place for everything and everything in its place. @@ -2963,7 +2959,7 @@ You'll get a warning if you use something marked unstable. 
You may have noticed an exclamation point in the `warn` attribute declaration. The `!` in this attribute means that this attribute applies to the enclosing -item, rather than to the item that follows the attribute. So this `warn` +item, rather than to the item that follows the attribute. This `warn` attribute declaration applies to the enclosing crate itself, rather than to whatever item statement follows it: @@ -3003,9 +2999,9 @@ Hello, world! Great. Rust's infrastructure supports tests in two sorts of places, and they're for two kinds of tests: you include **unit test**s inside of the crate itself, and you place **integration test**s inside a `tests` directory. "Unit tests" -are small tests that test one focused unit, "integration tests" tests multiple -units in integration. That said, this is a social convention, they're no different -in syntax. Let's make a `tests` directory: +are small tests that test one focused unit; "integration tests" test multiple +units in integration. That being said, this is a social convention – they're no +different in syntax. Let's make a `tests` directory: ```{bash,ignore} $ mkdir tests @@ -3085,7 +3081,7 @@ test foo ... FAILED Now we're getting somewhere. Remember when we talked about naming our tests with good names? This is why. Here, it says 'test foo' because we called our -test 'foo.' If we had given it a good name, it'd be more clear which test +test 'foo'. If we had given it a good name, it'd be more clear which test failed, especially as we accumulate more tests. ```text @@ -3156,7 +3152,7 @@ our tests, it sets things up so that `cfg(test)` is true. But we want to only include `main` when it's _not_ true. So we use `not` to negate things: `cfg(not(test))` will only compile our code when the `cfg(test)` is false. -With this attribute we won't get the warning (even +With this attribute, we won't get the warning (even though `src/main.rs` gets recompiled this time): ```bash @@ -3200,7 +3196,7 @@ error: aborting due to previous error Build failed, waiting for other jobs to finish... Could not compile `testing`. -To learn more, run the command again with --verbose. +To learn more, run the command again with `--verbose`. ``` Rust can't find this function. That makes sense, as we didn't write it yet! @@ -3208,7 +3204,7 @@ Rust can't find this function. That makes sense, as we didn't write it yet! In order to share this code with our tests, we'll need to make a library crate. This is also just good software design: as we mentioned before, it's a good idea to put most of your functionality into a library crate, and have your executable -crate use that library. This allows for code re-use. +crate use that library. This allows for code reuse. To do that, we'll need to make a new module. Make a new file, `src/lib.rs`, and put this in it: @@ -3282,8 +3278,8 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured Great! One test passed. We've got an integration test showing that our public method works, but maybe we want to test some of the internal logic as well. While this function is simple, if it were more complicated, you can imagine -we'd need more tests. So let's break it up into two helper functions, and -write some unit tests to test those. +we'd need more tests. Let's break it up into two helper functions and write +some unit tests to test those. Change your `src/lib.rs` to look like this: @@ -3532,7 +3528,7 @@ error: cannot borrow immutable local variable `x` as mutable ``` We don't want a mutable reference to immutable data! 
This error message uses a -term we haven't talked about yet, 'borrow.' We'll get to that in just a moment. +term we haven't talked about yet, 'borrow'. We'll get to that in just a moment. This simple example actually illustrates a lot of Rust's power: Rust has prevented us, at compile time, from breaking our own rules. Because Rust's @@ -3653,9 +3649,10 @@ all of Rust. Let's see this syntax in action: fn foo(x: &int) -> &int { x } { - let x = 5i; // x is the owner of this integer, which is memory on the stack. + // x is the owner of the integer, which is memory on the stack. + let x = 5i; - // privilege 2: you may lend that resource, to as many borrowers as you'd like + // privilege 2: you may lend that resource to as many borrowers as you like let y = &x; let z = &x; @@ -3665,10 +3662,11 @@ fn foo(x: &int) -> &int { x } } { - let mut x = 5i; // x is the owner of this integer, which is memory on the stack. + // x is the owner of this integer, which is memory on the stack. + let mut x = 5i; - let y = &mut x; // privilege 3: you may lend that resource to a single borrower, - // mutably + // privilege 3: you may lend that resource to a single borrower, mutably + let y = &mut x; } ``` @@ -3684,7 +3682,7 @@ This last requirement can seem odd, but it also makes sense. If you have to return something, and you've lent it to someone, they need to give it back to you for you to give it back! If we didn't, then the owner could deallocate the memory, and the person we've loaned it out to would have a pointer to -invalid memory. This is called a 'dangling pointer.' +invalid memory. This is called a 'dangling pointer'. Let's re-examine the error that led us to talk about all of this, which was a violation of the restrictions placed on owners who lend something out mutably. @@ -3807,8 +3805,8 @@ an integer `5` and makes `x` a pointer to it: ``` The great thing about boxes is that we don't have to manually free this -allocation! Instead, when `x` reaches the end of its lifetime -- in this case, -when it goes out of scope at the end of the block -- Rust `free`s `x`. This +allocation! Instead, when `x` reaches the end of its lifetime – in this case, +when it goes out of scope at the end of the block – Rust `free`s `x`. This isn't because Rust has a garbage collector (it doesn't). Instead, by tracking the ownership and lifetime of a variable (with a little help from you, the programmer), the compiler knows precisely when it is no longer used. @@ -3873,12 +3871,12 @@ Sometimes you need a variable that is referenced from multiple places (immutably!), lasting as long as any of those places, and disappearing when it is no longer referenced. For instance, in a graph-like data structure, a node might be referenced from all of its neighbors. In this case, it is not possible -for the compiler to determine ahead of time when the value can be freed -- it +for the compiler to determine ahead of time when the value can be freed – it needs a little run-time support. Rust's **Rc** type provides shared ownership of a dynamically allocated value that is automatically freed at the end of its last owner's lifetime. (`Rc` -stands for 'reference counted,' referring to the way these library types are +stands for 'reference counted', referring to the way these library types are implemented.) This provides more flexibility than single-owner boxes, but has some runtime overhead. @@ -4300,7 +4298,7 @@ This line is more interesting. 
Here, we call our function, `twice`, and we pass it two arguments: an integer, `5`, and our closure, `square`. This is just like passing any other two variable bindings to a function, but if you've never worked with closures before, it can seem a little complex. Just think: "I'm -passing two variables, one is an int, and one is a function." +passing two variables: one is an int, and one is a function." Next, let's look at how `twice` is defined: @@ -4336,7 +4334,7 @@ fn twice(x: int, f: |int| -> int) -> int { ``` Since our closure is named `f`, we can call it just like we called our closures -before. And we pass in our `x` argument to each one. Hence 'twice.' +before, and we pass in our `x` argument to each one, hence the name `twice`. If you do the math, `(5 * 5) + (5 * 5) == 50`, so that's the output we get. @@ -4807,7 +4805,7 @@ enum Result { ``` if we wanted to. Convention says that the first generic parameter should be -`T`, for 'type,' and that we use `E` for 'error.' Rust doesn't care, however. +`T`, for 'type,' and that we use `E` for 'error'. Rust doesn't care, however. The `Result` type is intended to be used to return the result of a computation, and to have the ability to @@ -5212,7 +5210,7 @@ fn main() { The names don't actually change to this, it's just for illustration. But as you can see, there's no overhead of deciding which version to call here, -hence 'statically dispatched.' The downside is that we have two copies of +hence 'statically dispatched'. The downside is that we have two copies of the same function, so our binary is a little bit larger. # Tasks @@ -5412,7 +5410,7 @@ fn main() { } ``` -You can have the macros expanded like this: `rustc print.rs --pretty=expanded` – which will +You can have the macros expanded like this: `rustc print.rs --pretty=expanded`, which will give us this huge result: ```{rust,ignore} @@ -5498,7 +5496,6 @@ We covered a lot of ground here. When you've mastered everything in this Guide, you will have a firm grasp of basic Rust development. There's a whole lot more out there, we've just covered the surface. There's tons of topics that you can dig deeper into, and we've built specialized guides for many of them. To learn -more, dig into the [full documentation -index](index.html). +more, dig into the [full documentation index](index.html). Happy hacking! 
diff --git a/src/doc/po4a.conf b/src/doc/po4a.conf deleted file mode 100644 index 80f8b748814cb..0000000000000 --- a/src/doc/po4a.conf +++ /dev/null @@ -1,28 +0,0 @@ -# Add here a list of target languages; po4a will automatically -# generates .po for them and build .md when translated, eg: -# [po4a_langs] es fr it pt_BR -[po4a_langs] ja -[po4a_paths] doc/po/$master.pot $lang:src/doc/po/$lang/$master.po - -# Add here below all source documents to be translated -[type: text] src/doc/complement-bugreport.md $lang:doc/l10n/$lang/complement-bugreport.md -[type: text] src/doc/complement-design-faq.md $lang:doc/l10n/$lang/complement-design-faq.md -[type: text] src/doc/complement-lang-faq.md $lang:doc/l10n/$lang/complement-lang-faq.md -[type: text] src/doc/complement-project-faq.md $lang:doc/l10n/$lang/complement-project-faq.md -[type: text] src/doc/guide-container.md $lang:doc/l10n/$lang/guide-container.md -[type: text] src/doc/guide-ffi.md $lang:doc/l10n/$lang/guide-ffi.md -[type: text] src/doc/guide-ownership.md $lang:doc/l10n/$lang/guide-ownership.md -[type: text] src/doc/guide-macros.md $lang:doc/l10n/$lang/guide-macros.md -[type: text] src/doc/guide-plugin.md $lang:doc/l10n/$lang/guide-plugin.md -[type: text] src/doc/guide-pointers.md $lang:doc/l10n/$lang/guide-pointers.md -[type: text] src/doc/guide-strings.md $lang:doc/l10n/$lang/guide-strings.md -[type: text] src/doc/guide-tasks.md $lang:doc/l10n/$lang/guide-tasks.md -[type: text] src/doc/guide-testing.md $lang:doc/l10n/$lang/guide-testing.md -[type: text] src/doc/guide-unsafe.md $lang:doc/l10n/$lang/guide-unsafe.md -[type: text] src/doc/guide-crates.md $lang:doc/l10n/$lang/guide-crates.md -[type: text] src/doc/guide-error-handling.md $lang:doc/l10n/$lang/guide-error-handling.md -[type: text] src/doc/guide.md $lang:doc/l10n/$lang/guide.md -[type: text] src/doc/index.md $lang:doc/l10n/$lang/index.md -[type: text] src/doc/intro.md $lang:doc/l10n/$lang/intro.md -[type: text] src/doc/rust.md $lang:doc/l10n/$lang/rust.md -[type: text] src/doc/rustdoc.md $lang:doc/l10n/$lang/rustdoc.md diff --git a/src/doc/reference.md b/src/doc/reference.md index 3d4791e916e6a..97184d534983c 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -518,7 +518,7 @@ This last example is different because it is not possible to use the suffix syntax with a floating point literal ending in a period. `2.f64` would attempt to call a method named `f64` on `2`. -##### Boolean literals +#### Boolean literals The two values of the boolean type are written `true` and `false`. @@ -934,7 +934,7 @@ kinds of view items: ```{.ebnf .gram} extern_crate_decl : "extern" "crate" crate_name -crate_name: ident | ( string_lit as ident ) +crate_name: ident | ( string_lit "as" ident ) ``` An _`extern crate` declaration_ specifies a dependency on an external crate. @@ -3177,7 +3177,7 @@ Some examples of call expressions: # fn add(x: int, y: int) -> int { 0 } let x: int = add(1, 2); -let pi: Option = from_str("3.14"); +let pi: Option = "3.14".parse(); ``` ### Lambda expressions diff --git a/src/doc/rust.css b/src/doc/rust.css index 9656d17721e47..128d75468e6f4 100644 --- a/src/doc/rust.css +++ b/src/doc/rust.css @@ -1,5 +1,5 @@ /** - * Copyright 2013 The Rust Project Developers. See the COPYRIGHT + * Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT * file at the top-level directory of this distribution and at * http://rust-lang.org/COPYRIGHT. * With elements taken from Bootstrap v3.0.2 (MIT licensed). 
@@ -209,7 +209,6 @@ pre { code { padding: 0 2px; color: #8D1A38; - white-space: pre-wrap; } pre code { padding: 0; diff --git a/src/etc/rust-lldb b/src/etc/rust-lldb index 19f36df7dbab4..42902b06aee13 100755 --- a/src/etc/rust-lldb +++ b/src/etc/rust-lldb @@ -19,7 +19,7 @@ TMPFILE=`mktemp /tmp/rust-lldb-commands.XXXXXX` trap "rm -f $TMPFILE; exit" INT TERM EXIT # Find out where to look for the pretty printer Python module -RUSTC_SYSROOT=`rustc -Zprint-sysroot` +RUSTC_SYSROOT=`rustc --print sysroot` # Write the LLDB script to the tempfile echo "command script import \"$RUSTC_SYSROOT/lib/rustlib/etc/lldb_rust_formatters.py\"" >> $TMPFILE diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index e909947ab08b0..893c9d250b723 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -10,8 +10,62 @@ #![stable] -//! Concurrency-enabled mechanisms for sharing mutable and/or immutable state -//! between tasks. +//! Threadsafe reference-counted boxes (the `Arc` type). +//! +//! The `Arc` type provides shared ownership of an immutable value. Destruction is +//! deterministic, and will occur as soon as the last owner is gone. It is marked as `Send` because +//! it uses atomic reference counting. +//! +//! If you do not need thread-safety, and just need shared ownership, consider the [`Rc` +//! type](../rc/struct.Rc.html). It is the same as `Arc`, but does not use atomics, making it +//! both thread-unsafe as well as significantly faster when updating the reference count. +//! +//! The `downgrade` method can be used to create a non-owning `Weak` pointer to the box. A +//! `Weak` pointer can be upgraded to an `Arc` pointer, but will return `None` if the value +//! has already been dropped. +//! +//! For example, a tree with parent pointers can be represented by putting the nodes behind strong +//! `Arc` pointers, and then storing the parent pointers as `Weak` pointers. +//! +//! # Examples +//! +//! Sharing some immutable data between tasks: +//! +//! ``` +//! use std::sync::Arc; +//! use std::thread::Thread; +//! +//! let five = Arc::new(5i); +//! +//! for i in range(0u, 10) { +//! let five = five.clone(); +//! +//! Thread::spawn(move || { +//! println!("{}", five); +//! }).detach(); +//! } +//! ``` +//! +//! Sharing mutable data safely between tasks with a `Mutex`: +//! +//! ``` +//! use std::sync::{Arc, Mutex}; +//! use std::thread::Thread; +//! +//! let five = Arc::new(Mutex::new(5i)); +//! +//! for _ in range(0u, 10) { +//! let five = five.clone(); +//! +//! Thread::spawn(move || { +//! let mut number = five.lock(); +//! +//! *number += 1; +//! +//! println!("{}", *number); // prints 6 +//! }).detach(); +//! } +//! ``` use core::atomic; use core::borrow::BorrowFrom; @@ -33,9 +87,8 @@ use heap::deallocate; /// /// # Example /// -/// In this example, a large vector of floats is shared between several tasks. -/// With simple pipes, without `Arc`, a copy would have to be made for each -/// task. +/// In this example, a large vector of floats is shared between several tasks. With simple pipes, +/// without `Arc`, a copy would have to be made for each task. /// /// ```rust /// use std::sync::Arc; @@ -66,8 +119,8 @@ pub struct Arc { /// A weak pointer to an `Arc`. /// -/// Weak pointers will not keep the data inside of the `Arc` alive, and can be -/// used to break cycles between `Arc` pointers. +/// Weak pointers will not keep the data inside of the `Arc` alive, and can be used to break cycles +/// between `Arc` pointers. 
#[unsafe_no_drop_flag] #[experimental = "Weak pointers may not belong in this module."] pub struct Weak { @@ -83,7 +136,15 @@ struct ArcInner { } impl Arc { - /// Creates an atomically reference counted wrapper. + /// Constructs a new `Arc`. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// ``` #[inline] #[stable] pub fn new(data: T) -> Arc { @@ -97,11 +158,17 @@ impl Arc { Arc { _ptr: unsafe { mem::transmute(x) } } } - /// Downgrades a strong pointer to a weak pointer. + /// Downgrades the `Arc` to a `Weak` reference. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); /// - /// Weak pointers will not keep the data alive. Once all strong references - /// to the underlying data have been dropped, the data itself will be - /// destroyed. + /// let weak_five = five.downgrade(); + /// ``` #[experimental = "Weak pointers may not belong in this module."] pub fn downgrade(&self) -> Weak { // See the clone() impl for why this is relaxed @@ -113,11 +180,10 @@ impl Arc { impl Arc { #[inline] fn inner(&self) -> &ArcInner { - // This unsafety is ok because while this arc is alive we're guaranteed - // that the inner pointer is valid. Furthermore, we know that the - // `ArcInner` structure itself is `Sync` because the inner data is - // `Sync` as well, so we're ok loaning out an immutable pointer to - // these contents. + // This unsafety is ok because while this arc is alive we're guaranteed that the inner + // pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync` + // because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer + // to these contents. unsafe { &*self._ptr } } } @@ -132,24 +198,30 @@ pub fn weak_count(this: &Arc) -> uint { this.inner().weak.load(atomic::Seq #[experimental] pub fn strong_count(this: &Arc) -> uint { this.inner().strong.load(atomic::SeqCst) } -#[unstable = "waiting on stability of Clone"] +#[stable] impl Clone for Arc { - /// Duplicate an atomically reference counted wrapper. + /// Makes a clone of the `Arc`. + /// + /// This increases the strong reference count. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); /// - /// The resulting two `Arc` objects will point to the same underlying data - /// object. However, one of the `Arc` objects can be sent to another task, - /// allowing them to share the underlying data. + /// five.clone(); + /// ``` #[inline] fn clone(&self) -> Arc { - // Using a relaxed ordering is alright here, as knowledge of the - // original reference prevents other threads from erroneously deleting - // the object. + // Using a relaxed ordering is alright here, as knowledge of the original reference + // prevents other threads from erroneously deleting the object. // - // As explained in the [Boost documentation][1], Increasing the - // reference counter can always be done with memory_order_relaxed: New - // references to an object can only be formed from an existing - // reference, and passing an existing reference from one thread to - // another must already provide any required synchronization. + // As explained in the [Boost documentation][1], Increasing the reference counter can + // always be done with memory_order_relaxed: New references to an object can only be formed + // from an existing reference, and passing an existing reference from one thread to another + // must already provide any required synchronization. 
// // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) self.inner().strong.fetch_add(1, atomic::Relaxed); @@ -172,26 +244,33 @@ impl Deref for Arc { } impl Arc { - /// Acquires a mutable pointer to the inner contents by guaranteeing that - /// the reference count is one (no sharing is possible). + /// Make a mutable reference from the given `Arc`. /// - /// This is also referred to as a copy-on-write operation because the inner - /// data is cloned if the reference count is greater than one. + /// This is also referred to as a copy-on-write operation because the inner data is cloned if + /// the reference count is greater than one. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let mut five = Arc::new(5i); + /// + /// let mut_five = five.make_unique(); + /// ``` #[inline] #[experimental] pub fn make_unique(&mut self) -> &mut T { - // Note that we hold a strong reference, which also counts as - // a weak reference, so we only clone if there is an - // additional reference of either kind. + // Note that we hold a strong reference, which also counts as a weak reference, so we only + // clone if there is an additional reference of either kind. if self.inner().strong.load(atomic::SeqCst) != 1 || self.inner().weak.load(atomic::SeqCst) != 1 { *self = Arc::new((**self).clone()) } - // This unsafety is ok because we're guaranteed that the pointer - // returned is the *only* pointer that will ever be returned to T. Our - // reference count is guaranteed to be 1 at this point, and we required - // the Arc itself to be `mut`, so we're returning the only possible - // reference to the inner data. + // This unsafety is ok because we're guaranteed that the pointer returned is the *only* + // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at + // this point, and we required the Arc itself to be `mut`, so we're returning the only + // possible reference to the inner data. let inner = unsafe { &mut *self._ptr }; &mut inner.data } @@ -200,38 +279,59 @@ impl Arc { #[unsafe_destructor] #[experimental = "waiting on stability of Drop"] impl Drop for Arc { + /// Drops the `Arc`. + /// + /// This will decrement the strong reference count. If the strong reference count becomes zero + /// and the only other references are `Weak` ones, `drop`s the inner value. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// { + /// let five = Arc::new(5i); + /// + /// // stuff + /// + /// drop(five); // explict drop + /// } + /// { + /// let five = Arc::new(5i); + /// + /// // stuff + /// + /// } // implicit drop + /// ``` fn drop(&mut self) { - // This structure has #[unsafe_no_drop_flag], so this drop glue may run - // more than once (but it is guaranteed to be zeroed after the first if - // it's run more than once) + // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but + // it is guaranteed to be zeroed after the first if it's run more than once) if self._ptr.is_null() { return } - // Because `fetch_sub` is already atomic, we do not need to synchronize - // with other threads unless we are going to delete the object. This - // same logic applies to the below `fetch_sub` to the `weak` count. + // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads + // unless we are going to delete the object. This same logic applies to the below + // `fetch_sub` to the `weak` count. 
if self.inner().strong.fetch_sub(1, atomic::Release) != 1 { return } - // This fence is needed to prevent reordering of use of the data and - // deletion of the data. Because it is marked `Release`, the - // decreasing of the reference count synchronizes with this `Acquire` - // fence. This means that use of the data happens before decreasing - // the reference count, which happens before this fence, which - // happens before the deletion of the data. + // This fence is needed to prevent reordering of use of the data and deletion of the data. + // Because it is marked `Release`, the decreasing of the reference count synchronizes with + // this `Acquire` fence. This means that use of the data happens before decreasing the + // reference count, which happens before this fence, which happens before the deletion of + // the data. // // As explained in the [Boost documentation][1], // - // It is important to enforce any possible access to the object in - // one thread (through an existing reference) to *happen before* - // deleting the object in a different thread. This is achieved by a - // "release" operation after dropping a reference (any access to the - // object through this reference must obviously happened before), - // and an "acquire" operation before deleting the object. + // > It is important to enforce any possible access to the object in one thread (through an + // > existing reference) to *happen before* deleting the object in a different thread. This + // > is achieved by a "release" operation after dropping a reference (any access to the + // > object through this reference must obviously happened before), and an "acquire" + // > operation before deleting the object. // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) atomic::fence(atomic::Acquire); - // Destroy the data at this time, even though we may not free the box - // allocation itself (there may still be weak pointers lying around). + // Destroy the data at this time, even though we may not free the box allocation itself + // (there may still be weak pointers lying around). unsafe { drop(ptr::read(&self.inner().data)); } if self.inner().weak.fetch_sub(1, atomic::Release) == 1 { @@ -244,14 +344,26 @@ impl Drop for Arc { #[experimental = "Weak pointers may not belong in this module."] impl Weak { - /// Attempts to upgrade this weak reference to a strong reference. + /// Upgrades a weak reference to a strong reference. + /// + /// Upgrades the `Weak` reference to an `Arc`, if possible. + /// + /// Returns `None` if there were no strong references and the data was destroyed. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; /// - /// This method will not upgrade this reference if the strong reference count has already - /// reached 0, but if there are still other active strong references this function will return - /// a new strong reference to the data. + /// let five = Arc::new(5i); + /// + /// let weak_five = five.downgrade(); + /// + /// let strong_five: Option<Arc<int>> = weak_five.upgrade(); + /// ``` pub fn upgrade(&self) -> Option<Arc<T>> { - // We use a CAS loop to increment the strong count instead of a - // fetch_add because once the count hits 0 is must never be above 0. + // We use a CAS loop to increment the strong count instead of a fetch_add because once the + // count hits 0 it must never be above 0.
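        // Editor's note (illustration only, not part of the patch): a CAS loop can
        // refuse to bump a count it observes at 0, in which case `upgrade` returns
        // `None`, whereas a plain `fetch_add` would momentarily resurrect a count
        // that has already reached zero.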
let inner = self.inner(); loop { let n = inner.strong.load(atomic::SeqCst); @@ -270,6 +382,19 @@ impl Weak { #[experimental = "Weak pointers may not belong in this module."] impl Clone for Weak { + /// Makes a clone of the `Weak`. + /// + /// This increases the weak reference count. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let weak_five = Arc::new(5i).downgrade(); + /// + /// weak_five.clone(); + /// ``` #[inline] fn clone(&self) -> Weak { // See comments in Arc::clone() for why this is relaxed @@ -281,13 +406,37 @@ impl Clone for Weak { #[unsafe_destructor] #[experimental = "Weak pointers may not belong in this module."] impl Drop for Weak { + /// Drops the `Weak`. + /// + /// This will decrement the weak reference count. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// { + /// let five = Arc::new(5i); + /// let weak_five = five.downgrade(); + /// + /// // stuff + /// + /// drop(weak_five); // explict drop + /// } + /// { + /// let five = Arc::new(5i); + /// let weak_five = five.downgrade(); + /// + /// // stuff + /// + /// } // implicit drop + /// ``` fn drop(&mut self) { // see comments above for why this check is here if self._ptr.is_null() { return } - // If we find out that we were the last weak pointer, then its time to - // deallocate the data entirely. See the discussion in Arc::drop() about - // the memory orderings + // If we find out that we were the last weak pointer, then its time to deallocate the data + // entirely. See the discussion in Arc::drop() about the memory orderings if self.inner().weak.fetch_sub(1, atomic::Release) == 1 { atomic::fence(atomic::Acquire); unsafe { deallocate(self._ptr as *mut u8, size_of::>(), @@ -298,18 +447,114 @@ impl Drop for Weak { #[unstable = "waiting on PartialEq"] impl PartialEq for Arc { + /// Equality for two `Arc`s. + /// + /// Two `Arc`s are equal if their inner value are equal. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// + /// five == Arc::new(5i); + /// ``` fn eq(&self, other: &Arc) -> bool { *(*self) == *(*other) } + + /// Inequality for two `Arc`s. + /// + /// Two `Arc`s are unequal if their inner value are unequal. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// + /// five != Arc::new(5i); + /// ``` fn ne(&self, other: &Arc) -> bool { *(*self) != *(*other) } } #[unstable = "waiting on PartialOrd"] impl PartialOrd for Arc { + /// Partial comparison for two `Arc`s. + /// + /// The two are compared by calling `partial_cmp()` on their inner values. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// + /// five.partial_cmp(&Arc::new(5i)); + /// ``` fn partial_cmp(&self, other: &Arc) -> Option { (**self).partial_cmp(&**other) } + + /// Less-than comparison for two `Arc`s. + /// + /// The two are compared by calling `<` on their inner values. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// + /// five < Arc::new(5i); + /// ``` fn lt(&self, other: &Arc) -> bool { *(*self) < *(*other) } + + /// 'Less-than or equal to' comparison for two `Arc`s. + /// + /// The two are compared by calling `<=` on their inner values. 
+ /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// + /// five <= Arc::new(5i); + /// ``` fn le(&self, other: &Arc) -> bool { *(*self) <= *(*other) } - fn ge(&self, other: &Arc) -> bool { *(*self) >= *(*other) } + + /// Greater-than comparison for two `Arc`s. + /// + /// The two are compared by calling `>` on their inner values. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// + /// five > Arc::new(5i); + /// ``` fn gt(&self, other: &Arc) -> bool { *(*self) > *(*other) } + + /// 'Greater-than or equal to' comparison for two `Arc`s. + /// + /// The two are compared by calling `>=` on their inner values. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5i); + /// + /// five >= Arc::new(5i); + /// ``` + fn ge(&self, other: &Arc) -> bool { *(*self) >= *(*other) } } #[unstable = "waiting on Ord"] impl Ord for Arc { diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 879a8cc6951a1..5fd234192c515 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -22,6 +22,7 @@ use core::option::Option; use core::raw::TraitObject; use core::result::Result; use core::result::Result::{Ok, Err}; +use core::ops::{Deref, DerefMut}; /// A value that represents the global exchange heap. This is the default /// place that the `box` keyword allocates into when no place is supplied. @@ -57,7 +58,7 @@ impl Default for Box<[T]> { fn default() -> Box<[T]> { box [] } } -#[unstable] +#[stable] impl Clone for Box { /// Returns a copy of the owned box. #[inline] @@ -147,6 +148,14 @@ impl fmt::Show for Box { } } +impl Deref for Box { + fn deref(&self) -> &T { &**self } +} + +impl DerefMut for Box { + fn deref_mut(&mut self) -> &mut T { &mut **self } +} + #[cfg(test)] mod test { #[test] @@ -193,4 +202,10 @@ mod test { let s = format!("{}", b); assert_eq!(s, "&Any"); } + + #[test] + fn deref() { + fn homura>(_: T) { } + homura(box 765i32); + } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 0257c640d3ccd..dfa55848c90da 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -168,12 +168,12 @@ struct RcBox { /// An immutable reference-counted pointer type. /// -/// See the [module level documentation](../index.html) for more. +/// See the [module level documentation](../index.html) for more details. #[unsafe_no_drop_flag] #[stable] pub struct Rc { - // FIXME #12808: strange names to try to avoid interfering with - // field accesses of the contained type via Deref + // FIXME #12808: strange names to try to avoid interfering with field accesses of the contained + // type via Deref _ptr: *mut RcBox, _nosend: marker::NoSend, _noshare: marker::NoSync @@ -193,11 +193,9 @@ impl Rc { pub fn new(value: T) -> Rc { unsafe { Rc { - // there is an implicit weak pointer owned by all the - // strong pointers, which ensures that the weak - // destructor never frees the allocation while the - // strong destructor is running, even if the weak - // pointer is stored inside the strong one. + // there is an implicit weak pointer owned by all the strong pointers, which + // ensures that the weak destructor never frees the allocation while the strong + // destructor is running, even if the weak pointer is stored inside the strong one. 
_ptr: transmute(box RcBox { value: value, strong: Cell::new(1), @@ -341,11 +339,10 @@ impl Rc { if !is_unique(self) { *self = Rc::new((**self).clone()) } - // This unsafety is ok because we're guaranteed that the pointer - // returned is the *only* pointer that will ever be returned to T. Our - // reference count is guaranteed to be 1 at this point, and we required - // the `Rc` itself to be `mut`, so we're returning the only possible - // reference to the inner value. + // This unsafety is ok because we're guaranteed that the pointer returned is the *only* + // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at + // this point, and we required the `Rc` itself to be `mut`, so we're returning the only + // possible reference to the inner value. let inner = unsafe { &mut *self._ptr }; &mut inner.value } @@ -399,8 +396,8 @@ impl Drop for Rc { if self.strong() == 0 { ptr::read(&**self); // destroy the contained object - // remove the implicit "strong weak" pointer now - // that we've destroyed the contents. + // remove the implicit "strong weak" pointer now that we've destroyed the + // contents. self.dec_weak(); if self.weak() == 0 { @@ -413,7 +410,7 @@ impl Drop for Rc { } } -#[unstable = "Clone is unstable."] +#[stable] impl Clone for Rc { /// Makes a clone of the `Rc`. /// @@ -687,8 +684,8 @@ impl Drop for Weak { unsafe { if !self._ptr.is_null() { self.dec_weak(); - // the weak count starts at 1, and will only go to - // zero if all the strong pointers have disappeared. + // the weak count starts at 1, and will only go to zero if all the strong pointers + // have disappeared. if self.weak() == 0 { deallocate(self._ptr as *mut u8, size_of::>(), min_align_of::>()) diff --git a/src/libcollections/bench.rs b/src/libcollections/bench.rs index 3346e55158a2a..fbaebd0125d0d 100644 --- a/src/libcollections/bench.rs +++ b/src/libcollections/bench.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::prelude::*; +use prelude::*; use std::rand; use std::rand::Rng; use test::Bencher; diff --git a/src/libcollections/binary_heap.rs b/src/libcollections/binary_heap.rs index be99c4c0bc724..589e580cc9121 100644 --- a/src/libcollections/binary_heap.rs +++ b/src/libcollections/binary_heap.rs @@ -239,8 +239,8 @@ impl BinaryHeap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter<'a>(&'a self) -> Items<'a, T> { - Items { iter: self.data.iter() } + pub fn iter(&self) -> Iter { + Iter { iter: self.data.iter() } } /// Creates a consuming iterator, that is, one that moves each value out of @@ -260,8 +260,8 @@ impl BinaryHeap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> MoveItems { - MoveItems { iter: self.data.into_iter() } + pub fn into_iter(self) -> IntoIter { + IntoIter { iter: self.data.into_iter() } } /// Returns the greatest item in a queue, or `None` if it is empty. 
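// --- Editor's sketch, not part of the patch: how the renamed `BinaryHeap`
// iterators and the `peek` accessor read at a call site, in the same old
// dialect (`1i`, `uint`) the surrounding doc examples assume. ---
fn heap_iteration_sketch() {
    use std::collections::BinaryHeap;

    let mut heap = BinaryHeap::new();
    heap.push(1i);
    heap.push(5i);
    heap.push(2i);

    // `iter()` (now returning `Iter`) borrows the elements in arbitrary order.
    assert_eq!(heap.iter().count(), 3);

    // `peek()` replaces the old `top()` and borrows the greatest element.
    assert_eq!(heap.peek(), Some(&5i));

    // `into_iter()` (now returning `IntoIter`) consumes the heap.
    assert_eq!(heap.into_iter().count(), 3);
}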
@@ -272,16 +272,17 @@ impl BinaryHeap { /// use std::collections::BinaryHeap; /// /// let mut heap = BinaryHeap::new(); - /// assert_eq!(heap.top(), None); + /// assert_eq!(heap.peek(), None); /// /// heap.push(1i); /// heap.push(5i); /// heap.push(2i); - /// assert_eq!(heap.top(), Some(&5i)); + /// assert_eq!(heap.peek(), Some(&5i)); /// /// ``` - pub fn top<'a>(&'a self) -> Option<&'a T> { - if self.is_empty() { None } else { Some(&self.data[0]) } + #[stable] + pub fn peek(&self) -> Option<&T> { + self.data.get(0) } /// Returns the number of elements the queue can hold without reallocating. @@ -388,13 +389,13 @@ impl BinaryHeap { /// heap.push(1i); /// /// assert_eq!(heap.len(), 3); - /// assert_eq!(heap.top(), Some(&5i)); + /// assert_eq!(heap.peek(), Some(&5i)); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn push(&mut self, item: T) { + let old_len = self.len(); self.data.push(item); - let new_len = self.len() - 1; - self.siftup(0, new_len); + self.siftup(0, old_len); } /// Pushes an item onto a queue then pops the greatest item off the queue in @@ -412,13 +413,19 @@ impl BinaryHeap { /// assert_eq!(heap.push_pop(3i), 5); /// assert_eq!(heap.push_pop(9i), 9); /// assert_eq!(heap.len(), 2); - /// assert_eq!(heap.top(), Some(&3i)); + /// assert_eq!(heap.peek(), Some(&3i)); /// ``` pub fn push_pop(&mut self, mut item: T) -> T { - if !self.is_empty() && *self.top().unwrap() > item { - swap(&mut item, &mut self.data[0]); - self.siftdown(0); + match self.data.get_mut(0) { + None => return item, + Some(top) => if *top > item { + swap(&mut item, top); + } else { + return item; + }, } + + self.siftdown(0); item } @@ -436,7 +443,7 @@ impl BinaryHeap { /// assert_eq!(heap.replace(1i), None); /// assert_eq!(heap.replace(3i), Some(1i)); /// assert_eq!(heap.len(), 1); - /// assert_eq!(heap.top(), Some(&3i)); + /// assert_eq!(heap.peek(), Some(&3i)); /// ``` pub fn replace(&mut self, mut item: T) -> Option { if !self.is_empty() { @@ -465,7 +472,7 @@ impl BinaryHeap { /// println!("{}", x); /// } /// ``` - pub fn into_vec(self) -> Vec { let BinaryHeap{data: v} = self; v } + pub fn into_vec(self) -> Vec { self.data } /// Consumes the `BinaryHeap` and returns a vector in sorted /// (ascending) order. @@ -482,15 +489,14 @@ impl BinaryHeap { /// let vec = heap.into_sorted_vec(); /// assert_eq!(vec, vec![1i, 2, 3, 4, 5, 6, 7]); /// ``` - pub fn into_sorted_vec(self) -> Vec { - let mut q = self; - let mut end = q.len(); + pub fn into_sorted_vec(mut self) -> Vec { + let mut end = self.len(); while end > 1 { end -= 1; - q.data.swap(0, end); - q.siftdown_range(0, end) + self.data.swap(0, end); + self.siftdown_range(0, end) } - q.into_vec() + self.into_vec() } // The implementations of siftup and siftdown use unsafe blocks in @@ -551,37 +557,66 @@ impl BinaryHeap { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_empty(&self) -> bool { self.len() == 0 } + /// Clears the queue, returning an iterator over the removed elements. + #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn drain<'a>(&'a mut self) -> Drain<'a, T> { + Drain { + iter: self.data.drain(), + } + } + /// Drops all items from the queue. #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn clear(&mut self) { self.data.truncate(0) } + pub fn clear(&mut self) { self.drain(); } } /// `BinaryHeap` iterator. 
-pub struct Items <'a, T:'a> { - iter: slice::Items<'a, T>, +pub struct Iter <'a, T: 'a> { + iter: slice::Iter<'a, T>, } -impl<'a, T> Iterator<&'a T> for Items<'a, T> { +impl<'a, T> Iterator<&'a T> for Iter<'a, T> { #[inline] - fn next(&mut self) -> Option<(&'a T)> { self.iter.next() } + fn next(&mut self) -> Option<&'a T> { self.iter.next() } #[inline] fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl<'a, T> DoubleEndedIterator<&'a T> for Items<'a, T> { +impl<'a, T> DoubleEndedIterator<&'a T> for Iter<'a, T> { #[inline] - fn next_back(&mut self) -> Option<(&'a T)> { self.iter.next_back() } + fn next_back(&mut self) -> Option<&'a T> { self.iter.next_back() } } -impl<'a, T> ExactSizeIterator<&'a T> for Items<'a, T> {} +impl<'a, T> ExactSizeIterator<&'a T> for Iter<'a, T> {} /// An iterator that moves out of a `BinaryHeap`. -pub struct MoveItems { - iter: vec::MoveItems, +pub struct IntoIter { + iter: vec::IntoIter, +} + +impl Iterator for IntoIter { + #[inline] + fn next(&mut self) -> Option { self.iter.next() } + + #[inline] + fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } +} + +impl DoubleEndedIterator for IntoIter { + #[inline] + fn next_back(&mut self) -> Option { self.iter.next_back() } +} + +impl ExactSizeIterator for IntoIter {} + +/// An iterator that drains a `BinaryHeap`. +pub struct Drain<'a, T: 'a> { + iter: vec::Drain<'a, T>, } -impl Iterator for MoveItems { +impl<'a, T: 'a> Iterator for Drain<'a, T> { #[inline] fn next(&mut self) -> Option { self.iter.next() } @@ -589,17 +624,16 @@ impl Iterator for MoveItems { fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl DoubleEndedIterator for MoveItems { +impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() } } -impl ExactSizeIterator for MoveItems {} +impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {} impl FromIterator for BinaryHeap { fn from_iter>(iter: Iter) -> BinaryHeap { - let vec: Vec = iter.collect(); - BinaryHeap::from_vec(vec) + BinaryHeap::from_vec(iter.collect()) } } @@ -617,10 +651,9 @@ impl Extend for BinaryHeap { #[cfg(test)] mod tests { - use std::prelude::*; + use prelude::*; use super::BinaryHeap; - use vec::Vec; #[test] fn test_iterator() { @@ -682,13 +715,13 @@ mod tests { } #[test] - fn test_top_and_pop() { + fn test_peek_and_pop() { let data = vec!(2u, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1); let mut sorted = data.clone(); sorted.sort(); let mut heap = BinaryHeap::from_vec(data); while !heap.is_empty() { - assert_eq!(heap.top().unwrap(), sorted.last().unwrap()); + assert_eq!(heap.peek().unwrap(), sorted.last().unwrap()); assert_eq!(heap.pop().unwrap(), sorted.pop().unwrap()); } } @@ -697,44 +730,44 @@ mod tests { fn test_push() { let mut heap = BinaryHeap::from_vec(vec!(2i, 4, 9)); assert_eq!(heap.len(), 3); - assert!(*heap.top().unwrap() == 9); + assert!(*heap.peek().unwrap() == 9); heap.push(11); assert_eq!(heap.len(), 4); - assert!(*heap.top().unwrap() == 11); + assert!(*heap.peek().unwrap() == 11); heap.push(5); assert_eq!(heap.len(), 5); - assert!(*heap.top().unwrap() == 11); + assert!(*heap.peek().unwrap() == 11); heap.push(27); assert_eq!(heap.len(), 6); - assert!(*heap.top().unwrap() == 27); + assert!(*heap.peek().unwrap() == 27); heap.push(3); assert_eq!(heap.len(), 7); - assert!(*heap.top().unwrap() == 27); + assert!(*heap.peek().unwrap() == 27); heap.push(103); assert_eq!(heap.len(), 8); - assert!(*heap.top().unwrap() == 103); + assert!(*heap.peek().unwrap() == 103); } 
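    // --- Editor's sketch, not part of the patch: exercises the rewritten
    // `push_pop` fast path described above; the values are illustrative only. ---
    #[test]
    fn test_push_pop_sketch() {
        let mut heap = BinaryHeap::from_vec(vec!(2i, 4, 9));
        // Larger than the current maximum: pushed and immediately popped back.
        assert_eq!(heap.push_pop(11i), 11);
        // Smaller than the current maximum: the old maximum is popped instead.
        assert_eq!(heap.push_pop(1i), 9);
        assert_eq!(heap.len(), 3);
        assert_eq!(heap.peek(), Some(&4i));
    }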
#[test] fn test_push_unique() { let mut heap = BinaryHeap::from_vec(vec!(box 2i, box 4, box 9)); assert_eq!(heap.len(), 3); - assert!(*heap.top().unwrap() == box 9); + assert!(*heap.peek().unwrap() == box 9); heap.push(box 11); assert_eq!(heap.len(), 4); - assert!(*heap.top().unwrap() == box 11); + assert!(*heap.peek().unwrap() == box 11); heap.push(box 5); assert_eq!(heap.len(), 5); - assert!(*heap.top().unwrap() == box 11); + assert!(*heap.peek().unwrap() == box 11); heap.push(box 27); assert_eq!(heap.len(), 6); - assert!(*heap.top().unwrap() == box 27); + assert!(*heap.peek().unwrap() == box 27); heap.push(box 3); assert_eq!(heap.len(), 7); - assert!(*heap.top().unwrap() == box 27); + assert!(*heap.peek().unwrap() == box 27); heap.push(box 103); assert_eq!(heap.len(), 8); - assert!(*heap.top().unwrap() == box 103); + assert!(*heap.peek().unwrap() == box 103); } #[test] @@ -794,20 +827,20 @@ mod tests { #[test] fn test_empty_pop() { - let mut heap: BinaryHeap = BinaryHeap::new(); + let mut heap = BinaryHeap::::new(); assert!(heap.pop().is_none()); } #[test] - fn test_empty_top() { - let empty: BinaryHeap = BinaryHeap::new(); - assert!(empty.top().is_none()); + fn test_empty_peek() { + let empty = BinaryHeap::::new(); + assert!(empty.peek().is_none()); } #[test] fn test_empty_replace() { - let mut heap: BinaryHeap = BinaryHeap::new(); - heap.replace(5).is_none(); + let mut heap = BinaryHeap::::new(); + assert!(heap.replace(5).is_none()); } #[test] @@ -820,4 +853,14 @@ mod tests { assert_eq!(q.pop().unwrap(), x); } } + + #[test] + fn test_drain() { + let mut q: BinaryHeap<_> = + [9u, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect(); + + assert_eq!(q.drain().take(5).count(), 5); + + assert!(q.is_empty()); + } } diff --git a/src/libcollections/bit.rs b/src/libcollections/bit.rs index 7f78d56607e7a..430d7210bf69b 100644 --- a/src/libcollections/bit.rs +++ b/src/libcollections/bit.rs @@ -8,8 +8,25 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// FIXME(Gankro): Bitv and BitvSet are very tightly coupled. Ideally (for maintenance), -// they should be in separate files/modules, with BitvSet only using Bitv's public API. +// FIXME(Gankro): Bitv and BitvSet are very tightly coupled. Ideally (for +// maintenance), they should be in separate files/modules, with BitvSet only +// using Bitv's public API. This will be hard for performance though, because +// `Bitv` will not want to leak its internal representation while its internal +// representation as `u32`s must be assumed for best performance. + +// FIXME(tbu-): `Bitv`'s methods shouldn't be `union`, `intersection`, but +// rather `or` and `and`. + +// (1) Be careful, most things can overflow here because the amount of bits in +// memory can overflow `uint`. +// (2) Make sure that the underlying vector has no excess length: +// E. g. `nbits == 16`, `storage.len() == 2` would be excess length, +// because the last word isn't used at all. This is important because some +// methods rely on it (for *CORRECTNESS*). +// (3) Make sure that the unused bits in the last word are zeroed out, again +// other methods rely on it for *CORRECTNESS*. +// (4) `BitvSet` is tightly coupled with `Bitv`, so any changes you make in +// `Bitv` will need to be reflected in `BitvSet`. //! Collections implemented with bit vectors. //! @@ -31,7 +48,7 @@ //! let primes = { //! // Assume all numbers are prime to begin, and then we //! // cross off non-primes progressively -//! 
let mut bv = Bitv::with_capacity(max_prime, true); +//! let mut bv = Bitv::from_elem(max_prime, true); //! //! // Neither 0 nor 1 are prime //! bv.set(0, false); @@ -68,18 +85,27 @@ use core::prelude::*; use core::cmp; use core::default::Default; use core::fmt; -use core::iter::{Chain, Enumerate, Repeat, Skip, Take, repeat}; +use core::iter::{Cloned, Chain, Enumerate, Repeat, Skip, Take}; use core::iter; use core::num::Int; -use core::slice; -use core::u32; -use std::hash; +use core::slice::{Iter, IterMut}; +use core::{u8, u32, uint}; -use vec::Vec; +use core::hash; +use Vec; -// FIXME(conventions): look, we just need to refactor this whole thing. Inside and out. +type Blocks<'a> = Cloned>; +type MutBlocks<'a> = IterMut<'a, u32>; +type MatchWords<'a> = Chain>, Skip>>>>; + +fn reverse_bits(byte: u8) -> u8 { + let mut result = 0; + for i in range(0, u8::BITS) { + result |= ((byte >> i) & 1) << (u8::BITS - 1 - i); + } + result +} -type MatchWords<'a> = Chain, Skip>>>>; // Take two BitV's, and return iterators of their words, where the shorter one // has been padded with 0's fn match_words <'a,'b>(a: &'a Bitv, b: &'b Bitv) -> (MatchWords<'a>, MatchWords<'b>) { @@ -88,11 +114,11 @@ fn match_words <'a,'b>(a: &'a Bitv, b: &'b Bitv) -> (MatchWords<'a>, MatchWords< // have to uselessly pretend to pad the longer one for type matching if a_len < b_len { - (a.mask_words(0).chain(repeat(0u32).enumerate().take(b_len).skip(a_len)), - b.mask_words(0).chain(repeat(0u32).enumerate().take(0).skip(0))) + (a.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(b_len).skip(a_len)), + b.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(0).skip(0))) } else { - (a.mask_words(0).chain(repeat(0u32).enumerate().take(0).skip(0)), - b.mask_words(0).chain(repeat(0u32).enumerate().take(a_len).skip(b_len))) + (a.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(0).skip(0)), + b.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(a_len).skip(b_len))) } } @@ -106,7 +132,7 @@ static FALSE: bool = false; /// ```rust /// use collections::Bitv; /// -/// let mut bv = Bitv::with_capacity(10, false); +/// let mut bv = Bitv::from_elem(10, false); /// /// // insert all primes less than 10 /// bv.set(2, true); @@ -133,10 +159,11 @@ pub struct Bitv { nbits: uint } +// FIXME(Gankro): NopeNopeNopeNopeNope (wait for IndexGet to be a thing) impl Index for Bitv { #[inline] fn index<'a>(&'a self, i: &uint) -> &'a bool { - if self.get(*i) { + if self.get(*i).expect("index out of bounds") { &TRUE } else { &FALSE @@ -144,46 +171,40 @@ impl Index for Bitv { } } -struct MaskWords<'a> { - iter: slice::Items<'a, u32>, - next_word: Option<&'a u32>, - last_word_mask: u32, - offset: uint +/// Computes how many blocks are needed to store that many bits +fn blocks_for_bits(bits: uint) -> uint { + // If we want 17 bits, dividing by 32 will produce 0. So we add 1 to make sure we + // reserve enough. But if we want exactly a multiple of 32, this will actually allocate + // one too many. So we need to check if that's the case. We can do that by computing if + // bitwise AND by `32 - 1` is 0. But LLVM should be able to optimize the semantically + // superior modulo operator on a power of two to this. + // + // Note that we can technically avoid this branch with the expression + // `(nbits + u32::BITS - 1) / 32::BITS`, but if nbits is almost uint::MAX this will overflow. 
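    // Editor's note (illustration only, not part of the patch): concretely,
    // blocks_for_bits(0) == 0, blocks_for_bits(1) == 1, blocks_for_bits(32) == 1,
    // and blocks_for_bits(33) == 2, i.e. a ceiling division that cannot overflow.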
+ if bits % u32::BITS == 0 { + bits / u32::BITS + } else { + bits / u32::BITS + 1 + } } -impl<'a> Iterator<(uint, u32)> for MaskWords<'a> { - /// Returns (offset, word) - #[inline] - fn next(&mut self) -> Option<(uint, u32)> { - let ret = self.next_word; - match ret { - Some(&w) => { - self.next_word = self.iter.next(); - self.offset += 1; - // The last word may need to be masked - if self.next_word.is_none() { - Some((self.offset - 1, w & self.last_word_mask)) - } else { - Some((self.offset - 1, w)) - } - }, - None => None - } - } +/// Computes the bitmask for the final word of the vector +fn mask_for_bits(bits: uint) -> u32 { + // Note especially that a perfect multiple of u32::BITS should mask all 1s. + !0u32 >> (u32::BITS - bits % u32::BITS) % u32::BITS } impl Bitv { + /// Applies the given operation to the blocks of self and other, and sets + /// self to be the result. This relies on the caller not to corrupt the + /// last word. #[inline] fn process(&mut self, other: &Bitv, mut op: F) -> bool where F: FnMut(u32, u32) -> u32 { - let len = other.storage.len(); - assert_eq!(self.storage.len(), len); + assert_eq!(self.len(), other.len()); + // This could theoretically be a `debug_assert!`. + assert_eq!(self.storage.len(), other.storage.len()); let mut changed = false; - // Notice: `a` is *not* masked here, which is fine as long as - // `op` is a bitwise operation, since any bits that should've - // been masked were fine to change anyway. `b` is masked to - // make sure its unmasked bits do not cause damage. - for (a, (_, b)) in self.storage.iter_mut() - .zip(other.mask_words(0)) { + for (a, b) in self.blocks_mut().zip(other.blocks()) { let w = op(*a, b); if *a != w { changed = true; @@ -193,22 +214,26 @@ impl Bitv { changed } - #[inline] - fn mask_words<'a>(&'a self, mut start: uint) -> MaskWords<'a> { - if start > self.storage.len() { - start = self.storage.len(); - } - let mut iter = self.storage[start..].iter(); - MaskWords { - next_word: iter.next(), - iter: iter, - last_word_mask: { - let rem = self.nbits % u32::BITS; - if rem > 0 { - (1 << rem) - 1 - } else { !0 } - }, - offset: start + /// Iterator over mutable refs to the underlying blocks of data. + fn blocks_mut(&mut self) -> MutBlocks { + // (2) + self.storage.iter_mut() + } + + /// Iterator over the underlying blocks of data + fn blocks(&self) -> Blocks { + // (2) + self.storage.iter().cloned() + } + + /// An operation might screw up the unused bits in the last block of the + /// `Bitv`. As per (3), it's assumed to be all 0s. This method fixes it up. + fn fix_last_block(&mut self) { + let extra_bits = self.len() % u32::BITS; + if extra_bits > 0 { + let mask = (1 << extra_bits) - 1; + let storage_len = self.storage.len(); + self.storage[storage_len - 1] &= mask; } } @@ -226,61 +251,132 @@ impl Bitv { } /// Creates a `Bitv` that holds `nbits` elements, setting each element - /// to `init`. + /// to `bit`. 
/// /// # Examples /// /// ``` /// use std::collections::Bitv; /// - /// let mut bv = Bitv::with_capacity(10u, false); + /// let mut bv = Bitv::from_elem(10u, false); /// assert_eq!(bv.len(), 10u); /// for x in bv.iter() { /// assert_eq!(x, false); /// } /// ``` - pub fn with_capacity(nbits: uint, init: bool) -> Bitv { + pub fn from_elem(nbits: uint, bit: bool) -> Bitv { + let nblocks = blocks_for_bits(nbits); let mut bitv = Bitv { - storage: Vec::from_elem((nbits + u32::BITS - 1) / u32::BITS, - if init { !0u32 } else { 0u32 }), + storage: Vec::from_elem(nblocks, if bit { !0u32 } else { 0u32 }), nbits: nbits }; + bitv.fix_last_block(); + bitv + } + + /// Constructs a new, empty `Bitv` with the specified capacity. + /// + /// The bitvector will be able to hold at least `capacity` bits without + /// reallocating. If `capacity` is 0, it will not allocate. + /// + /// It is important to note that this function does not specify the + /// *length* of the returned bitvector, but only the *capacity*. + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn with_capacity(nbits: uint) -> Bitv { + Bitv { + storage: Vec::with_capacity(blocks_for_bits(nbits)), + nbits: 0, + } + } + + /// Transforms a byte-vector into a `Bitv`. Each byte becomes eight bits, + /// with the most significant bits of each byte coming first. Each + /// bit becomes `true` if equal to 1 or `false` if equal to 0. + /// + /// # Examples + /// + /// ``` + /// use std::collections::Bitv; + /// + /// let bv = Bitv::from_bytes(&[0b10100000, 0b00010010]); + /// assert!(bv.eq_vec(&[true, false, true, false, + /// false, false, false, false, + /// false, false, false, true, + /// false, false, true, false])); + /// ``` + pub fn from_bytes(bytes: &[u8]) -> Bitv { + let len = bytes.len().checked_mul(u8::BITS).expect("capacity overflow"); + let mut bitv = Bitv::with_capacity(len); + let complete_words = bytes.len() / 4; + let extra_bytes = bytes.len() % 4; + + bitv.nbits = len; + + for i in range(0, complete_words) { + bitv.storage.push( + (reverse_bits(bytes[i * 4 + 0]) as u32 << 0) | + (reverse_bits(bytes[i * 4 + 1]) as u32 << 8) | + (reverse_bits(bytes[i * 4 + 2]) as u32 << 16) | + (reverse_bits(bytes[i * 4 + 3]) as u32 << 24) + ); + } - // Zero out any unused bits in the highest word if necessary - let used_bits = bitv.nbits % u32::BITS; - if init && used_bits != 0 { - let largest_used_word = (bitv.nbits + u32::BITS - 1) / u32::BITS - 1; - bitv.storage[largest_used_word] &= (1 << used_bits) - 1; + if extra_bytes > 0 { + let mut last_word = 0u32; + for (i, &byte) in bytes[complete_words*4..].iter().enumerate() { + last_word |= reverse_bits(byte) as u32 << (i * 8); + } + bitv.storage.push(last_word); } bitv } - /// Retrieves the value at index `i`. + /// Creates a `Bitv` of the specified length where the value at each index + /// is `f(index)`. /// - /// # Panics + /// # Examples /// - /// Panics if `i` is out of bounds. + /// ``` + /// use std::collections::Bitv; + /// + /// let bv = Bitv::from_fn(5, |i| { i % 2 == 0 }); + /// assert!(bv.eq_vec(&[true, false, true, false, true])); + /// ``` + pub fn from_fn(len: uint, mut f: F) -> Bitv where F: FnMut(uint) -> bool { + let mut bitv = Bitv::from_elem(len, false); + for i in range(0u, len) { + bitv.set(i, f(i)); + } + bitv + } + + /// Retrieves the value at index `i`, or `None` if the index is out of bounds. 
/// /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// - /// let bv = bitv::from_bytes(&[0b01100000]); - /// assert_eq!(bv.get(0), false); - /// assert_eq!(bv.get(1), true); + /// let bv = Bitv::from_bytes(&[0b01100000]); + /// assert_eq!(bv.get(0), Some(false)); + /// assert_eq!(bv.get(1), Some(true)); + /// assert_eq!(bv.get(100), None); /// /// // Can also use array indexing /// assert_eq!(bv[1], true); /// ``` #[inline] - pub fn get(&self, i: uint) -> bool { - assert!(i < self.nbits); + #[unstable = "panic semantics are likely to change in the future"] + pub fn get(&self, i: uint) -> Option { + if i >= self.nbits { + return None; + } let w = i / u32::BITS; let b = i % u32::BITS; - let x = self.storage[w] & (1 << b); - x != 0 + self.storage.get(w).map(|&block| + (block & (1 << b)) != 0 + ) } /// Sets the value of a bit at an index `i`. @@ -294,11 +390,12 @@ impl Bitv { /// ``` /// use std::collections::Bitv; /// - /// let mut bv = Bitv::with_capacity(5, false); + /// let mut bv = Bitv::from_elem(5, false); /// bv.set(3, true); /// assert_eq!(bv[3], true); /// ``` #[inline] + #[unstable = "panic semantics are likely to change in the future"] pub fn set(&mut self, i: uint, x: bool) { assert!(i < self.nbits); let w = i / u32::BITS; @@ -314,18 +411,19 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// /// let before = 0b01100000; /// let after = 0b11111111; /// - /// let mut bv = bitv::from_bytes(&[before]); + /// let mut bv = Bitv::from_bytes(&[before]); /// bv.set_all(); - /// assert_eq!(bv, bitv::from_bytes(&[after])); + /// assert_eq!(bv, Bitv::from_bytes(&[after])); /// ``` #[inline] pub fn set_all(&mut self) { for w in self.storage.iter_mut() { *w = !0u32; } + self.fix_last_block(); } /// Flips all bits. @@ -333,18 +431,19 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// /// let before = 0b01100000; /// let after = 0b10011111; /// - /// let mut bv = bitv::from_bytes(&[before]); + /// let mut bv = Bitv::from_bytes(&[before]); /// bv.negate(); - /// assert_eq!(bv, bitv::from_bytes(&[after])); + /// assert_eq!(bv, Bitv::from_bytes(&[after])); /// ``` #[inline] pub fn negate(&mut self) { for w in self.storage.iter_mut() { *w = !*w; } + self.fix_last_block(); } /// Calculates the union of two bitvectors. 
This acts like the bitwise `or` @@ -360,17 +459,17 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// /// let a = 0b01100100; /// let b = 0b01011010; /// let res = 0b01111110; /// - /// let mut a = bitv::from_bytes(&[a]); - /// let b = bitv::from_bytes(&[b]); + /// let mut a = Bitv::from_bytes(&[a]); + /// let b = Bitv::from_bytes(&[b]); /// /// assert!(a.union(&b)); - /// assert_eq!(a, bitv::from_bytes(&[res])); + /// assert_eq!(a, Bitv::from_bytes(&[res])); /// ``` #[inline] pub fn union(&mut self, other: &Bitv) -> bool { @@ -390,17 +489,17 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// /// let a = 0b01100100; /// let b = 0b01011010; /// let res = 0b01000000; /// - /// let mut a = bitv::from_bytes(&[a]); - /// let b = bitv::from_bytes(&[b]); + /// let mut a = Bitv::from_bytes(&[a]); + /// let b = Bitv::from_bytes(&[b]); /// /// assert!(a.intersect(&b)); - /// assert_eq!(a, bitv::from_bytes(&[res])); + /// assert_eq!(a, Bitv::from_bytes(&[res])); /// ``` #[inline] pub fn intersect(&mut self, other: &Bitv) -> bool { @@ -420,24 +519,24 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// /// let a = 0b01100100; /// let b = 0b01011010; /// let a_b = 0b00100100; // a - b /// let b_a = 0b00011010; // b - a /// - /// let mut bva = bitv::from_bytes(&[a]); - /// let bvb = bitv::from_bytes(&[b]); + /// let mut bva = Bitv::from_bytes(&[a]); + /// let bvb = Bitv::from_bytes(&[b]); /// /// assert!(bva.difference(&bvb)); - /// assert_eq!(bva, bitv::from_bytes(&[a_b])); + /// assert_eq!(bva, Bitv::from_bytes(&[a_b])); /// - /// let bva = bitv::from_bytes(&[a]); - /// let mut bvb = bitv::from_bytes(&[b]); + /// let bva = Bitv::from_bytes(&[a]); + /// let mut bvb = Bitv::from_bytes(&[b]); /// /// assert!(bvb.difference(&bva)); - /// assert_eq!(bvb, bitv::from_bytes(&[b_a])); + /// assert_eq!(bvb, Bitv::from_bytes(&[b_a])); /// ``` #[inline] pub fn difference(&mut self, other: &Bitv) -> bool { @@ -451,20 +550,21 @@ impl Bitv { /// ``` /// use std::collections::Bitv; /// - /// let mut bv = Bitv::with_capacity(5, true); + /// let mut bv = Bitv::from_elem(5, true); /// assert_eq!(bv.all(), true); /// /// bv.set(1, false); /// assert_eq!(bv.all(), false); /// ``` - #[inline] pub fn all(&self) -> bool { let mut last_word = !0u32; - // Check that every word but the last is all-ones... - self.mask_words(0).all(|(_, elem)| - { let tmp = last_word; last_word = elem; tmp == !0u32 }) && - // ...and that the last word is ones as far as it needs to be - (last_word == ((1 << self.nbits % u32::BITS) - 1) || last_word == !0u32) + // Check that every block but the last is all-ones... + self.blocks().all(|elem| { + let tmp = last_word; + last_word = elem; + tmp == !0u32 + // and then check the last one has enough ones + }) && (last_word == mask_for_bits(self.nbits)) } /// Returns an iterator over the elements of the vector in order. 
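// --- Editor's sketch, not part of the patch: the new `Option`-returning `get`
// alongside the renamed constructor, mirroring the doc examples above. ---
fn bitv_get_sketch() {
    use std::collections::Bitv;

    let mut bv = Bitv::from_elem(4, false);   // replaces the old Bitv::with_capacity(4, false)
    bv.set(2, true);

    // In-bounds reads now come back as Some(..), out-of-bounds reads as None.
    assert_eq!(bv.get(2), Some(true));
    assert_eq!(bv.get(7), None);

    // Indexing still yields a bare bool and panics when out of bounds.
    assert_eq!(bv[2], true);
}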
@@ -472,14 +572,15 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// - /// let bv = bitv::from_bytes(&[0b01110100, 0b10010010]); + /// let bv = Bitv::from_bytes(&[0b01110100, 0b10010010]); /// assert_eq!(bv.iter().filter(|x| *x).count(), 7); /// ``` #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn iter<'a>(&'a self) -> Bits<'a> { - Bits {bitv: self, next_idx: 0, end_idx: self.nbits} + Bits { bitv: self, next_idx: 0, end_idx: self.nbits } } /// Returns `true` if all bits are 0. @@ -489,14 +590,14 @@ impl Bitv { /// ``` /// use std::collections::Bitv; /// - /// let mut bv = Bitv::with_capacity(10, false); + /// let mut bv = Bitv::from_elem(10, false); /// assert_eq!(bv.none(), true); /// /// bv.set(3, true); /// assert_eq!(bv.none(), false); /// ``` pub fn none(&self) -> bool { - self.mask_words(0).all(|(_, w)| w == 0) + self.blocks().all(|w| w == 0) } /// Returns `true` if any bit is 1. @@ -506,7 +607,7 @@ impl Bitv { /// ``` /// use std::collections::Bitv; /// - /// let mut bv = Bitv::with_capacity(10, false); + /// let mut bv = Bitv::from_elem(10, false); /// assert_eq!(bv.any(), false); /// /// bv.set(3, true); @@ -527,24 +628,24 @@ impl Bitv { /// ``` /// use std::collections::Bitv; /// - /// let mut bv = Bitv::with_capacity(3, true); + /// let mut bv = Bitv::from_elem(3, true); /// bv.set(1, false); /// /// assert_eq!(bv.to_bytes(), vec!(0b10100000)); /// - /// let mut bv = Bitv::with_capacity(9, false); + /// let mut bv = Bitv::from_elem(9, false); /// bv.set(2, true); /// bv.set(8, true); /// /// assert_eq!(bv.to_bytes(), vec!(0b00100000, 0b10000000)); /// ``` pub fn to_bytes(&self) -> Vec { - fn bit (bitv: &Bitv, byte: uint, bit: uint) -> u8 { + fn bit(bitv: &Bitv, byte: uint, bit: uint) -> u8 { let offset = byte * 8 + bit; if offset >= bitv.nbits { 0 } else { - bitv.get(offset) as u8 << (7 - bit) + bitv[offset] as u8 << (7 - bit) } } @@ -562,19 +663,10 @@ impl Bitv { ) } - /// Transforms `self` into a `Vec` by turning each bit into a `bool`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::bitv; - /// - /// let bv = bitv::from_bytes(&[0b10100000]); - /// assert_eq!(bv.to_bools(), vec!(true, false, true, false, - /// false, false, false, false)); - /// ``` + /// Deprecated: Use `iter().collect()`. + #[deprecated = "Use `iter().collect()`"] pub fn to_bools(&self) -> Vec { - Vec::from_fn(self.nbits, |i| self.get(i)) + self.iter().collect() } /// Compares a `Bitv` to a slice of `bool`s. @@ -587,21 +679,16 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// - /// let bv = bitv::from_bytes(&[0b10100000]); + /// let bv = Bitv::from_bytes(&[0b10100000]); /// /// assert!(bv.eq_vec(&[true, false, true, false, /// false, false, false, false])); /// ``` pub fn eq_vec(&self, v: &[bool]) -> bool { assert_eq!(self.nbits, v.len()); - let mut i = 0; - while i < self.nbits { - if self.get(i) != v[i] { return false; } - i = i + 1; - } - true + iter::order::eq(self.iter(), v.iter().cloned()) } /// Shortens a `Bitv`, dropping excess elements. 
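// --- Editor's sketch, not part of the patch: the `iter().collect()` idiom that
// replaces the deprecated `to_bools`, per the note above. ---
fn bitv_iter_sketch() {
    use std::collections::Bitv;

    let bv = Bitv::from_bytes(&[0b10100000]);

    // Formerly `bv.to_bools()`:
    let bools: Vec<bool> = bv.iter().collect();
    assert_eq!(bools, vec!(true, false, true, false,
                           false, false, false, false));

    // `eq_vec` performs the same comparison without allocating.
    assert!(bv.eq_vec(&[true, false, true, false,
                        false, false, false, false]));
}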
@@ -612,9 +699,9 @@ impl Bitv { /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// - /// let mut bv = bitv::from_bytes(&[0b01001011]); + /// let mut bv = Bitv::from_bytes(&[0b01001011]); /// bv.truncate(2); /// assert!(bv.eq_vec(&[false, true])); /// ``` @@ -622,32 +709,65 @@ impl Bitv { pub fn truncate(&mut self, len: uint) { if len < self.len() { self.nbits = len; - let word_len = (len + u32::BITS - 1) / u32::BITS; - self.storage.truncate(word_len); - if len % u32::BITS > 0 { - let mask = (1 << len % u32::BITS) - 1; - self.storage[word_len - 1] &= mask; - } + // This fixes (2). + self.storage.truncate(blocks_for_bits(len)); + self.fix_last_block(); } } - /// Grows the vector to be able to store `size` bits without resizing. + /// Reserves capacity for at least `additional` more bits to be inserted in the given + /// `Bitv`. The collection may reserve more space to avoid frequent reallocations. + /// + /// # Panics + /// + /// Panics if the new capacity overflows `uint`. /// /// # Examples /// /// ``` /// use std::collections::Bitv; /// - /// let mut bv = Bitv::with_capacity(3, false); + /// let mut bv = Bitv::from_elem(3, false); /// bv.reserve(10); /// assert_eq!(bv.len(), 3); - /// assert!(bv.capacity() >= 10); + /// assert!(bv.capacity() >= 13); + /// ``` + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn reserve(&mut self, additional: uint) { + let desired_cap = self.len().checked_add(additional).expect("capacity overflow"); + let storage_len = self.storage.len(); + if desired_cap > self.capacity() { + self.storage.reserve(blocks_for_bits(desired_cap) - storage_len); + } + } + + /// Reserves the minimum capacity for exactly `additional` more bits to be inserted in the + /// given `Bitv`. Does nothing if the capacity is already sufficient. + /// + /// Note that the allocator may give the collection more space than it requests. Therefore + /// capacity can not be relied upon to be precisely minimal. Prefer `reserve` if future + /// insertions are expected. + /// + /// # Panics + /// + /// Panics if the new capacity overflows `uint`. + /// + /// # Examples + /// + /// ``` + /// use std::collections::Bitv; + /// + /// let mut bv = Bitv::from_elem(3, false); + /// bv.reserve(10); + /// assert_eq!(bv.len(), 3); + /// assert!(bv.capacity() >= 13); /// ``` - pub fn reserve(&mut self, size: uint) { - let old_size = self.storage.len(); - let new_size = (size + u32::BITS - 1) / u32::BITS; - if old_size < new_size { - self.storage.grow(new_size - old_size, 0); + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn reserve_exact(&mut self, additional: uint) { + let desired_cap = self.len().checked_add(additional).expect("capacity overflow"); + let storage_len = self.storage.len(); + if desired_cap > self.capacity() { + self.storage.reserve_exact(blocks_for_bits(desired_cap) - storage_len); } } @@ -664,83 +784,93 @@ impl Bitv { /// assert!(bv.capacity() >= 10); /// ``` #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn capacity(&self) -> uint { - self.storage.len() * u32::BITS + self.storage.capacity().checked_mul(u32::BITS).unwrap_or(uint::MAX) } /// Grows the `Bitv` in-place, adding `n` copies of `value` to the `Bitv`. /// + /// # Panics + /// + /// Panics if the new len overflows a `uint`. 
+ /// /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// - /// let mut bv = bitv::from_bytes(&[0b01001011]); + /// let mut bv = Bitv::from_bytes(&[0b01001011]); /// bv.grow(2, true); /// assert_eq!(bv.len(), 10); /// assert_eq!(bv.to_bytes(), vec!(0b01001011, 0b11000000)); /// ``` pub fn grow(&mut self, n: uint, value: bool) { - let new_nbits = self.nbits + n; - let new_nwords = (new_nbits + u32::BITS - 1) / u32::BITS; + // Note: we just bulk set all the bits in the last word in this fn in multiple places + // which is technically wrong if not all of these bits are to be used. However, at the end + // of this fn we call `fix_last_block` at the end of this fn, which should fix this. + + let new_nbits = self.nbits.checked_add(n).expect("capacity overflow"); + let new_nblocks = blocks_for_bits(new_nbits); let full_value = if value { !0 } else { 0 }; - // Correct the old tail word - let old_last_word = (self.nbits + u32::BITS - 1) / u32::BITS - 1; + + // Correct the old tail word, setting or clearing formerly unused bits + let old_last_word = blocks_for_bits(self.nbits) - 1; if self.nbits % u32::BITS > 0 { - let overhang = self.nbits % u32::BITS; // # of already-used bits - let mask = !((1 << overhang) - 1); // e.g. 5 unused bits => 111110....0 + let mask = mask_for_bits(self.nbits); if value { - self.storage[old_last_word] |= mask; + self.storage[old_last_word] |= !mask; } else { - self.storage[old_last_word] &= !mask; + // Extra bits are already zero by invariant. } } + // Fill in words after the old tail word - let stop_idx = cmp::min(self.storage.len(), new_nwords); + let stop_idx = cmp::min(self.storage.len(), new_nblocks); for idx in range(old_last_word + 1, stop_idx) { self.storage[idx] = full_value; } + // Allocate new words, if needed - if new_nwords > self.storage.len() { - let to_add = new_nwords - self.storage.len(); + if new_nblocks > self.storage.len() { + let to_add = new_nblocks - self.storage.len(); self.storage.grow(to_add, full_value); - - // Zero out and unused bits in the new tail word - if value { - let tail_word = new_nwords - 1; - let used_bits = new_nbits % u32::BITS; - self.storage[tail_word] &= (1 << used_bits) - 1; - } } + // Adjust internal bit count self.nbits = new_nbits; + + self.fix_last_block(); } - /// Shortens by one element and returns the removed element. - /// - /// # Panics - /// - /// Assert if empty. + /// Removes the last bit from the Bitv, and returns it. Returns None if the Bitv is empty. 
/// /// # Examples /// /// ``` - /// use std::collections::bitv; + /// use std::collections::Bitv; /// - /// let mut bv = bitv::from_bytes(&[0b01001001]); - /// assert_eq!(bv.pop(), true); - /// assert_eq!(bv.pop(), false); + /// let mut bv = Bitv::from_bytes(&[0b01001001]); + /// assert_eq!(bv.pop(), Some(true)); + /// assert_eq!(bv.pop(), Some(false)); /// assert_eq!(bv.len(), 6); - /// assert_eq!(bv.to_bytes(), vec!(0b01001000)); /// ``` - pub fn pop(&mut self) -> bool { - let ret = self.get(self.nbits - 1); - // If we are unusing a whole word, make sure it is zeroed out - if self.nbits % u32::BITS == 1 { - self.storage[self.nbits / u32::BITS] = 0; + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn pop(&mut self) -> Option { + if self.is_empty() { + None + } else { + let i = self.nbits - 1; + let ret = self[i]; + // (3) + self.set(i, false); + self.nbits = i; + if self.nbits % u32::BITS == 0 { + // (2) + self.storage.pop(); + } + Some(ret) } - self.nbits -= 1; - ret } /// Pushes a `bool` onto the end. @@ -755,12 +885,13 @@ impl Bitv { /// bv.push(false); /// assert!(bv.eq_vec(&[true, false])); /// ``` + #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn push(&mut self, elem: bool) { - let insert_pos = self.nbits; - self.nbits += 1; - if self.storage.len() * u32::BITS < self.nbits { + if self.nbits % u32::BITS == 0 { self.storage.push(0); } + let insert_pos = self.nbits; + self.nbits = self.nbits.checked_add(1).expect("Capacity overflow"); self.set(insert_pos, elem); } @@ -782,46 +913,16 @@ impl Bitv { } } -/// Transforms a byte-vector into a `Bitv`. Each byte becomes eight bits, -/// with the most significant bits of each byte coming first. Each -/// bit becomes `true` if equal to 1 or `false` if equal to 0. -/// -/// # Examples -/// -/// ``` -/// use std::collections::bitv; -/// -/// let bv = bitv::from_bytes(&[0b10100000, 0b00010010]); -/// assert!(bv.eq_vec(&[true, false, true, false, -/// false, false, false, false, -/// false, false, false, true, -/// false, false, true, false])); -/// ``` +/// Deprecated: Now a static method on Bitv. +#[deprecated = "Now a static method on Bitv"] pub fn from_bytes(bytes: &[u8]) -> Bitv { - from_fn(bytes.len() * 8, |i| { - let b = bytes[i / 8] as u32; - let offset = i % 8; - b >> (7 - offset) & 1 == 1 - }) + Bitv::from_bytes(bytes) } -/// Creates a `Bitv` of the specified length where the value at each -/// index is `f(index)`. -/// -/// # Examples -/// -/// ``` -/// use std::collections::bitv::from_fn; -/// -/// let bv = from_fn(5, |i| { i % 2 == 0 }); -/// assert!(bv.eq_vec(&[true, false, true, false, true])); -/// ``` -pub fn from_fn(len: uint, mut f: F) -> Bitv where F: FnMut(uint) -> bool { - let mut bitv = Bitv::with_capacity(len, false); - for i in range(0u, len) { - bitv.set(i, f(i)); - } - bitv +/// Deprecated: Now a static method on Bitv. 
+#[deprecated = "Now a static method on Bitv"] +pub fn from_fn(len: uint, f: F) -> Bitv where F: FnMut(uint) -> bool { + Bitv::from_fn(len, f) } #[stable] @@ -843,14 +944,14 @@ impl Extend for Bitv { #[inline] fn extend>(&mut self, mut iterator: I) { let (min, _) = iterator.size_hint(); - let nbits = self.nbits; - self.reserve(nbits + min); + self.reserve(min); for element in iterator { self.push(element) } } } +#[stable] impl Clone for Bitv { #[inline] fn clone(&self) -> Bitv { @@ -881,7 +982,7 @@ impl Ord for Bitv { impl fmt::Show for Bitv { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { for bit in self.iter() { - try!(write!(fmt, "{}", if bit { 1u } else { 0u })); + try!(write!(fmt, "{}", if bit { 1u32 } else { 0u32 })); } Ok(()) } @@ -890,7 +991,7 @@ impl fmt::Show for Bitv { impl hash::Hash for Bitv { fn hash(&self, state: &mut S) { self.nbits.hash(state); - for (_, elem) in self.mask_words(0) { + for elem in self.blocks() { elem.hash(state); } } @@ -902,7 +1003,7 @@ impl cmp::PartialEq for Bitv { if self.nbits != other.nbits { return false; } - self.mask_words(0).zip(other.mask_words(0)).all(|((_, w1), (_, w2))| w1 == w2) + self.blocks().zip(other.blocks()).all(|(w1, w2)| w1 == w2) } } @@ -921,7 +1022,7 @@ impl<'a> Iterator for Bits<'a> { if self.next_idx != self.end_idx { let idx = self.next_idx; self.next_idx += 1; - Some(self.bitv.get(idx)) + Some(self.bitv[idx]) } else { None } @@ -938,7 +1039,7 @@ impl<'a> DoubleEndedIterator for Bits<'a> { fn next_back(&mut self) -> Option { if self.next_idx != self.end_idx { self.end_idx -= 1; - Some(self.bitv.get(self.end_idx)) + Some(self.bitv[self.end_idx]) } else { None } @@ -958,7 +1059,7 @@ impl<'a> RandomAccessIterator for Bits<'a> { if index >= self.indexable() { None } else { - Some(self.bitv.get(index)) + Some(self.bitv[index]) } } } @@ -974,7 +1075,6 @@ impl<'a> RandomAccessIterator for Bits<'a> { /// /// ``` /// use std::collections::{BitvSet, Bitv}; -/// use std::collections::bitv; /// /// // It's a regular set /// let mut s = BitvSet::new(); @@ -989,7 +1089,7 @@ impl<'a> RandomAccessIterator for Bits<'a> { /// } /// /// // Can initialize from a `Bitv` -/// let other = BitvSet::from_bitv(bitv::from_bytes(&[0b11010000])); +/// let other = BitvSet::from_bitv(Bitv::from_bytes(&[0b11010000])); /// /// s.union_with(&other); /// @@ -1000,29 +1100,32 @@ impl<'a> RandomAccessIterator for Bits<'a> { /// /// // Can convert back to a `Bitv` /// let bv: Bitv = s.into_bitv(); -/// assert!(bv.get(3)); +/// assert!(bv[3]); /// ``` #[deriving(Clone)] -pub struct BitvSet(Bitv); +pub struct BitvSet { + bitv: Bitv, +} impl Default for BitvSet { #[inline] fn default() -> BitvSet { BitvSet::new() } } -impl FromIterator for BitvSet { - fn from_iter>(iterator: I) -> BitvSet { +impl FromIterator for BitvSet { + fn from_iter>(iterator: I) -> BitvSet { let mut ret = BitvSet::new(); ret.extend(iterator); ret } } -impl Extend for BitvSet { +impl Extend for BitvSet { #[inline] - fn extend>(&mut self, iterator: I) { - let &BitvSet(ref mut self_bitv) = self; - self_bitv.extend(iterator); + fn extend>(&mut self, mut iterator: I) { + for i in iterator { + self.insert(i); + } } } @@ -1053,45 +1156,47 @@ impl cmp::PartialEq for BitvSet { impl cmp::Eq for BitvSet {} impl BitvSet { - /// Creates a new bit vector set with initially no contents. + /// Creates a new empty `BitvSet`. 
/// /// # Examples /// /// ``` /// use std::collections::BitvSet; + /// /// let mut s = BitvSet::new(); /// ``` #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn new() -> BitvSet { - BitvSet(Bitv::new()) + BitvSet { bitv: Bitv::new() } } - /// Creates a new bit vector set with initially no contents, able to + /// Creates a new `BitvSet` with initially no contents, able to /// hold `nbits` elements without resizing. /// /// # Examples /// /// ``` /// use std::collections::BitvSet; + /// /// let mut s = BitvSet::with_capacity(100); /// assert!(s.capacity() >= 100); /// ``` #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn with_capacity(nbits: uint) -> BitvSet { - let bitv = Bitv::with_capacity(nbits, false); + let bitv = Bitv::from_elem(nbits, false); BitvSet::from_bitv(bitv) } - /// Creates a new bit vector set from the given bit vector. + /// Creates a new `BitvSet` from the given bit vector. /// /// # Examples /// /// ``` - /// use std::collections::{bitv, BitvSet}; + /// use std::collections::{Bitv, BitvSet}; /// - /// let bv = bitv::from_bytes(&[0b01100000]); + /// let bv = Bitv::from_bytes(&[0b01100000]); /// let s = BitvSet::from_bitv(bv); /// /// // Print 1, 2 in arbitrary order @@ -1100,10 +1205,8 @@ impl BitvSet { /// } /// ``` #[inline] - pub fn from_bitv(mut bitv: Bitv) -> BitvSet { - // Mark every bit as valid - bitv.nbits = bitv.capacity(); - BitvSet(bitv) + pub fn from_bitv(bitv: Bitv) -> BitvSet { + BitvSet { bitv: bitv } } /// Returns the capacity in bits for this bit vector. Inserting any @@ -1120,11 +1223,41 @@ impl BitvSet { #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn capacity(&self) -> uint { - let &BitvSet(ref bitv) = self; - bitv.capacity() + self.bitv.capacity() + } + + /// Reserves capacity for the given `BitvSet` to contain `len` distinct elements. In the case + /// of `BitvSet` this means reallocations will not occur as long as all inserted elements + /// are less than `len`. + /// + /// The collection may reserve more space to avoid frequent reallocations. + /// + /// + /// # Examples + /// + /// ``` + /// use std::collections::BitvSet; + /// + /// let mut s = BitvSet::new(); + /// s.reserve_len(10); + /// assert!(s.capacity() >= 10); + /// ``` + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn reserve_len(&mut self, len: uint) { + let cur_len = self.bitv.len(); + if len >= cur_len { + self.bitv.reserve(len - cur_len); + } } - /// Grows the underlying vector to be able to store `size` bits. + /// Reserves the minimum capacity for the given `BitvSet` to contain `len` distinct elements. + /// In the case of `BitvSet` this means reallocations will not occur as long as all inserted + /// elements are less than `len`. + /// + /// Note that the allocator may give the collection more space than it requests. Therefore + /// capacity can not be relied upon to be precisely minimal. Prefer `reserve_len` if future + /// insertions are expected. 
+ /// /// /// # Examples /// @@ -1132,17 +1265,18 @@ impl BitvSet { /// use std::collections::BitvSet; /// /// let mut s = BitvSet::new(); - /// s.reserve(10); + /// s.reserve_len_exact(10); /// assert!(s.capacity() >= 10); /// ``` - pub fn reserve(&mut self, size: uint) { - let &BitvSet(ref mut bitv) = self; - bitv.reserve(size); - if bitv.nbits < size { - bitv.nbits = bitv.capacity(); + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn reserve_len_exact(&mut self, len: uint) { + let cur_len = self.bitv.len(); + if len >= cur_len { + self.bitv.reserve_exact(len - cur_len); } } + /// Consumes this set to return the underlying bit vector. /// /// # Examples @@ -1155,13 +1289,12 @@ impl BitvSet { /// s.insert(3); /// /// let bv = s.into_bitv(); - /// assert!(bv.get(0)); - /// assert!(bv.get(3)); + /// assert!(bv[0]); + /// assert!(bv[3]); /// ``` #[inline] pub fn into_bitv(self) -> Bitv { - let BitvSet(bitv) = self; - bitv + self.bitv } /// Returns a reference to the underlying bit vector. @@ -1179,18 +1312,22 @@ impl BitvSet { /// ``` #[inline] pub fn get_ref<'a>(&'a self) -> &'a Bitv { - let &BitvSet(ref bitv) = self; - bitv + &self.bitv } #[inline] fn other_op(&mut self, other: &BitvSet, mut f: F) where F: FnMut(u32, u32) -> u32 { - // Expand the vector if necessary - self.reserve(other.capacity()); - // Unwrap Bitvs - let &BitvSet(ref mut self_bitv) = self; - let &BitvSet(ref other_bitv) = other; + let self_bitv = &mut self.bitv; + let other_bitv = &other.bitv; + + let self_len = self_bitv.len(); + let other_len = other_bitv.len(); + + // Expand the vector if necessary + if self_len < other_len { + self_bitv.grow(other_len - self_len, false); + } // virtually pad other with 0's for equal lengths let mut other_words = { @@ -1227,7 +1364,7 @@ impl BitvSet { #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn shrink_to_fit(&mut self) { - let &BitvSet(ref mut bitv) = self; + let bitv = &mut self.bitv; // Obtain original length let old_len = bitv.storage.len(); // Obtain coarse trailing zero length @@ -1243,10 +1380,9 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{Bitv, BitvSet}; /// - /// let s = BitvSet::from_bitv(bitv::from_bytes(&[0b01001010])); + /// let s = BitvSet::from_bitv(Bitv::from_bytes(&[0b01001010])); /// /// // Print 1, 4, 6 in arbitrary order /// for x in s.iter() { @@ -1265,11 +1401,10 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{Bitv, BitvSet}; /// - /// let a = BitvSet::from_bitv(bitv::from_bytes(&[0b01101000])); - /// let b = BitvSet::from_bitv(bitv::from_bytes(&[0b10100000])); + /// let a = BitvSet::from_bitv(Bitv::from_bytes(&[0b01101000])); + /// let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100000])); /// /// // Print 0, 1, 2, 4 in arbitrary order /// for x in a.union(&b) { @@ -1296,11 +1431,10 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{Bitv, BitvSet}; /// - /// let a = BitvSet::from_bitv(bitv::from_bytes(&[0b01101000])); - /// let b = BitvSet::from_bitv(bitv::from_bytes(&[0b10100000])); + /// let a = BitvSet::from_bitv(Bitv::from_bytes(&[0b01101000])); + /// let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100000])); /// /// // Print 2 /// for x in a.intersection(&b) { @@ 
-1311,8 +1445,7 @@ impl BitvSet { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn intersection<'a>(&'a self, other: &'a BitvSet) -> Take> { fn bitand(w1: u32, w2: u32) -> u32 { w1 & w2 } - - let min = cmp::min(self.capacity(), other.capacity()); + let min = cmp::min(self.bitv.len(), other.bitv.len()); TwoBitPositions { set: self, other: other, @@ -1328,11 +1461,10 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{BitvSet, Bitv}; /// - /// let a = BitvSet::from_bitv(bitv::from_bytes(&[0b01101000])); - /// let b = BitvSet::from_bitv(bitv::from_bytes(&[0b10100000])); + /// let a = BitvSet::from_bitv(Bitv::from_bytes(&[0b01101000])); + /// let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100000])); /// /// // Print 1, 4 in arbitrary order /// for x in a.difference(&b) { @@ -1367,11 +1499,10 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{BitvSet, Bitv}; /// - /// let a = BitvSet::from_bitv(bitv::from_bytes(&[0b01101000])); - /// let b = BitvSet::from_bitv(bitv::from_bytes(&[0b10100000])); + /// let a = BitvSet::from_bitv(Bitv::from_bytes(&[0b01101000])); + /// let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100000])); /// /// // Print 0, 1, 4 in arbitrary order /// for x in a.symmetric_difference(&b) { @@ -1397,16 +1528,15 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{BitvSet, Bitv}; /// /// let a = 0b01101000; /// let b = 0b10100000; /// let res = 0b11101000; /// - /// let mut a = BitvSet::from_bitv(bitv::from_bytes(&[a])); - /// let b = BitvSet::from_bitv(bitv::from_bytes(&[b])); - /// let res = BitvSet::from_bitv(bitv::from_bytes(&[res])); + /// let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[a])); + /// let b = BitvSet::from_bitv(Bitv::from_bytes(&[b])); + /// let res = BitvSet::from_bitv(Bitv::from_bytes(&[res])); /// /// a.union_with(&b); /// assert_eq!(a, res); @@ -1421,16 +1551,15 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{BitvSet, Bitv}; /// /// let a = 0b01101000; /// let b = 0b10100000; /// let res = 0b00100000; /// - /// let mut a = BitvSet::from_bitv(bitv::from_bytes(&[a])); - /// let b = BitvSet::from_bitv(bitv::from_bytes(&[b])); - /// let res = BitvSet::from_bitv(bitv::from_bytes(&[res])); + /// let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[a])); + /// let b = BitvSet::from_bitv(Bitv::from_bytes(&[b])); + /// let res = BitvSet::from_bitv(Bitv::from_bytes(&[res])); /// /// a.intersect_with(&b); /// assert_eq!(a, res); @@ -1446,24 +1575,23 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{BitvSet, Bitv}; /// /// let a = 0b01101000; /// let b = 0b10100000; /// let a_b = 0b01001000; // a - b /// let b_a = 0b10000000; // b - a /// - /// let mut bva = BitvSet::from_bitv(bitv::from_bytes(&[a])); - /// let bvb = BitvSet::from_bitv(bitv::from_bytes(&[b])); - /// let bva_b = BitvSet::from_bitv(bitv::from_bytes(&[a_b])); - /// let bvb_a = BitvSet::from_bitv(bitv::from_bytes(&[b_a])); + /// let mut bva = BitvSet::from_bitv(Bitv::from_bytes(&[a])); + /// let bvb = BitvSet::from_bitv(Bitv::from_bytes(&[b])); + /// let bva_b = 
BitvSet::from_bitv(Bitv::from_bytes(&[a_b])); + /// let bvb_a = BitvSet::from_bitv(Bitv::from_bytes(&[b_a])); /// /// bva.difference_with(&bvb); /// assert_eq!(bva, bva_b); /// - /// let bva = BitvSet::from_bitv(bitv::from_bytes(&[a])); - /// let mut bvb = BitvSet::from_bitv(bitv::from_bytes(&[b])); + /// let bva = BitvSet::from_bitv(Bitv::from_bytes(&[a])); + /// let mut bvb = BitvSet::from_bitv(Bitv::from_bytes(&[b])); /// /// bvb.difference_with(&bva); /// assert_eq!(bvb, bvb_a); @@ -1479,16 +1607,15 @@ impl BitvSet { /// # Examples /// /// ``` - /// use std::collections::BitvSet; - /// use std::collections::bitv; + /// use std::collections::{BitvSet, Bitv}; /// /// let a = 0b01101000; /// let b = 0b10100000; /// let res = 0b11001000; /// - /// let mut a = BitvSet::from_bitv(bitv::from_bytes(&[a])); - /// let b = BitvSet::from_bitv(bitv::from_bytes(&[b])); - /// let res = BitvSet::from_bitv(bitv::from_bytes(&[res])); + /// let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[a])); + /// let b = BitvSet::from_bitv(Bitv::from_bytes(&[b])); + /// let res = BitvSet::from_bitv(Bitv::from_bytes(&[res])); /// /// a.symmetric_difference_with(&b); /// assert_eq!(a, res); @@ -1502,32 +1629,29 @@ impl BitvSet { #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn len(&self) -> uint { - let &BitvSet(ref bitv) = self; - bitv.storage.iter().fold(0, |acc, &n| acc + n.count_ones()) + self.bitv.blocks().fold(0, |acc, n| acc + n.count_ones()) } /// Returns whether there are no bits set in this set #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_empty(&self) -> bool { - let &BitvSet(ref bitv) = self; - bitv.storage.iter().all(|&n| n == 0) + self.bitv.none() } /// Clears all bits in this set #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn clear(&mut self) { - let &BitvSet(ref mut bitv) = self; - bitv.clear(); + self.bitv.clear(); } /// Returns `true` if this set contains the specified integer. #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn contains(&self, value: &uint) -> bool { - let &BitvSet(ref bitv) = self; - *value < bitv.nbits && bitv.get(*value) + let bitv = &self.bitv; + *value < bitv.nbits && bitv[*value] } /// Returns `true` if the set has no elements in common with `other`. @@ -1542,14 +1666,14 @@ impl BitvSet { #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_subset(&self, other: &BitvSet) -> bool { - let &BitvSet(ref self_bitv) = self; - let &BitvSet(ref other_bitv) = other; + let self_bitv = &self.bitv; + let other_bitv = &other.bitv; + let other_blocks = blocks_for_bits(other_bitv.len()); // Check that `self` intersect `other` is self - self_bitv.mask_words(0).zip(other_bitv.mask_words(0)) - .all(|((_, w1), (_, w2))| w1 & w2 == w1) && - // Check that `self` setminus `other` is empty - self_bitv.mask_words(other_bitv.storage.len()).all(|(_, w)| w == 0) + self_bitv.blocks().zip(other_bitv.blocks()).all(|(w1, w2)| w1 & w2 == w1) && + // Make sure if `self` has any more blocks than `other`, they're all 0 + self_bitv.blocks().skip(other_blocks).all(|w| w == 0) } /// Returns `true` if the set is a superset of another. 
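Two of the rewrites above are easy to miss in the noise: `len` now folds `count_ones` over the storage blocks, and `is_subset` checks `w1 & w2 == w1` word by word, plus an all-zero check on any blocks `self` has beyond `other`. A standalone sketch of both predicates, again assuming trailing unused bits are zero; plain slices stand in for the `blocks()` iterator:

```rust
/// Number of elements in the set = total number of set bits across blocks.
fn set_len(blocks: &[u32]) -> usize {
    blocks.iter().fold(0, |acc, w| acc + w.count_ones() as usize)
}

/// `a` is a subset of `b` iff intersecting each word of `a` with the matching
/// word of `b` leaves it unchanged, and any extra words of `a` are all zero.
fn is_subset(a: &[u32], b: &[u32]) -> bool {
    a.iter().zip(b.iter()).all(|(w1, w2)| w1 & w2 == *w1)
        && a.iter().skip(b.len()).all(|w| *w == 0)
}

fn main() {
    assert_eq!(set_len(&[0b0110_1000]), 3);
    assert!(is_subset(&[0b0010_0000], &[0b0110_1000]));
    assert!(!is_subset(&[0b1000_0001], &[0b0110_1000]));
}
```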
@@ -1568,13 +1692,12 @@ impl BitvSet { } // Ensure we have enough space to hold the new element - if value >= self.capacity() { - let new_cap = cmp::max(value + 1, self.capacity() * 2); - self.reserve(new_cap); + let len = self.bitv.len(); + if value >= len { + self.bitv.grow(value - len + 1, false) } - let &BitvSet(ref mut bitv) = self; - bitv.set(value, true); + self.bitv.set(value, true); return true; } @@ -1585,8 +1708,9 @@ impl BitvSet { if !self.contains(value) { return false; } - let &BitvSet(ref mut bitv) = self; - bitv.set(*value, false); + + self.bitv.set(*value, false); + return true; } } @@ -1631,7 +1755,7 @@ pub struct TwoBitPositions<'a> { impl<'a> Iterator for BitPositions<'a> { fn next(&mut self) -> Option { - while self.next_idx < self.set.capacity() { + while self.next_idx < self.set.bitv.len() { let idx = self.next_idx; self.next_idx += 1; @@ -1645,18 +1769,18 @@ impl<'a> Iterator for BitPositions<'a> { #[inline] fn size_hint(&self) -> (uint, Option) { - (0, Some(self.set.capacity() - self.next_idx)) + (0, Some(self.set.bitv.len() - self.next_idx)) } } impl<'a> Iterator for TwoBitPositions<'a> { fn next(&mut self) -> Option { - while self.next_idx < self.set.capacity() || - self.next_idx < self.other.capacity() { + while self.next_idx < self.set.bitv.len() || + self.next_idx < self.other.bitv.len() { let bit_idx = self.next_idx % u32::BITS; if bit_idx == 0 { - let &BitvSet(ref s_bitv) = self.set; - let &BitvSet(ref o_bitv) = self.other; + let s_bitv = &self.set.bitv; + let o_bitv = &self.other.bitv; // Merging the two words is a bit of an awkward dance since // one Bitv might be longer than the other let word_idx = self.next_idx / u32::BITS; @@ -1679,32 +1803,33 @@ impl<'a> Iterator for TwoBitPositions<'a> { #[inline] fn size_hint(&self) -> (uint, Option) { - let cap = cmp::max(self.set.capacity(), self.other.capacity()); + let cap = cmp::max(self.set.bitv.len(), self.other.bitv.len()); (0, Some(cap - self.next_idx)) } } + + + + #[cfg(test)] mod tests { - use std::prelude::*; - use std::iter::range_step; + use prelude::*; + use core::iter::range_step; + use core::u32; use std::rand; use std::rand::Rng; - use std::u32; use test::{Bencher, black_box}; use super::{Bitv, BitvSet, from_fn, from_bytes}; use bitv; - use vec::Vec; - - static BENCH_BITS : uint = 1 << 14; #[test] fn test_to_str() { let zerolen = Bitv::new(); assert_eq!(zerolen.to_string(), ""); - let eightbits = Bitv::with_capacity(8u, false); + let eightbits = Bitv::from_elem(8u, false); assert_eq!(eightbits.to_string(), "00000000") } @@ -1713,22 +1838,26 @@ mod tests { let act = Bitv::new(); let exp = Vec::from_elem(0u, false); assert!(act.eq_vec(exp.as_slice())); + assert!(act.none() && act.all()); } #[test] fn test_1_element() { - let mut act = Bitv::with_capacity(1u, false); + let mut act = Bitv::from_elem(1u, false); assert!(act.eq_vec(&[false])); - act = Bitv::with_capacity(1u, true); + assert!(act.none() && !act.all()); + act = Bitv::from_elem(1u, true); assert!(act.eq_vec(&[true])); + assert!(!act.none() && act.all()); } #[test] fn test_2_elements() { - let mut b = bitv::Bitv::with_capacity(2, false); + let mut b = Bitv::from_elem(2, false); b.set(0, true); b.set(1, false); assert_eq!(b.to_string(), "10"); + assert!(!b.none() && !b.all()); } #[test] @@ -1736,39 +1865,44 @@ mod tests { let mut act; // all 0 - act = Bitv::with_capacity(10u, false); + act = Bitv::from_elem(10u, false); assert!((act.eq_vec( &[false, false, false, false, false, false, false, false, false, false]))); + assert!(act.none() && 
!act.all()); // all 1 - act = Bitv::with_capacity(10u, true); + act = Bitv::from_elem(10u, true); assert!((act.eq_vec(&[true, true, true, true, true, true, true, true, true, true]))); + assert!(!act.none() && act.all()); // mixed - act = Bitv::with_capacity(10u, false); + act = Bitv::from_elem(10u, false); act.set(0u, true); act.set(1u, true); act.set(2u, true); act.set(3u, true); act.set(4u, true); assert!((act.eq_vec(&[true, true, true, true, true, false, false, false, false, false]))); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(10u, false); + act = Bitv::from_elem(10u, false); act.set(5u, true); act.set(6u, true); act.set(7u, true); act.set(8u, true); act.set(9u, true); assert!((act.eq_vec(&[false, false, false, false, false, true, true, true, true, true]))); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(10u, false); + act = Bitv::from_elem(10u, false); act.set(0u, true); act.set(3u, true); act.set(6u, true); act.set(9u, true); assert!((act.eq_vec(&[true, false, false, true, false, false, true, false, false, true]))); + assert!(!act.none() && !act.all()); } #[test] @@ -1776,21 +1910,23 @@ mod tests { let mut act; // all 0 - act = Bitv::with_capacity(31u, false); + act = Bitv::from_elem(31u, false); assert!(act.eq_vec( &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false])); + assert!(act.none() && !act.all()); // all 1 - act = Bitv::with_capacity(31u, true); + act = Bitv::from_elem(31u, true); assert!(act.eq_vec( &[true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true])); + assert!(!act.none() && act.all()); // mixed - act = Bitv::with_capacity(31u, false); + act = Bitv::from_elem(31u, false); act.set(0u, true); act.set(1u, true); act.set(2u, true); @@ -1803,9 +1939,10 @@ mod tests { &[true, true, true, true, true, true, true, true, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(31u, false); + act = Bitv::from_elem(31u, false); act.set(16u, true); act.set(17u, true); act.set(18u, true); @@ -1818,9 +1955,10 @@ mod tests { &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true, true, true, true, true, true, true, true, false, false, false, false, false, false, false])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(31u, false); + act = Bitv::from_elem(31u, false); act.set(24u, true); act.set(25u, true); act.set(26u, true); @@ -1832,9 +1970,10 @@ mod tests { &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true, true, true, true, true, true, true])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(31u, false); + act = Bitv::from_elem(31u, false); act.set(3u, true); act.set(17u, true); act.set(30u, true); @@ -1842,6 +1981,7 @@ mod tests { &[false, false, false, true, false, false, false, false, false, false, false, false, false, false, false, false, false, true, false, false, false, false, false, false, false, 
false, false, false, false, false, true])); + assert!(!act.none() && !act.all()); } #[test] @@ -1849,21 +1989,23 @@ mod tests { let mut act; // all 0 - act = Bitv::with_capacity(32u, false); + act = Bitv::from_elem(32u, false); assert!(act.eq_vec( &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false])); + assert!(act.none() && !act.all()); // all 1 - act = Bitv::with_capacity(32u, true); + act = Bitv::from_elem(32u, true); assert!(act.eq_vec( &[true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true])); + assert!(!act.none() && act.all()); // mixed - act = Bitv::with_capacity(32u, false); + act = Bitv::from_elem(32u, false); act.set(0u, true); act.set(1u, true); act.set(2u, true); @@ -1876,9 +2018,10 @@ mod tests { &[true, true, true, true, true, true, true, true, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(32u, false); + act = Bitv::from_elem(32u, false); act.set(16u, true); act.set(17u, true); act.set(18u, true); @@ -1891,9 +2034,10 @@ mod tests { &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true, true, true, true, true, true, true, true, false, false, false, false, false, false, false, false])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(32u, false); + act = Bitv::from_elem(32u, false); act.set(24u, true); act.set(25u, true); act.set(26u, true); @@ -1906,9 +2050,10 @@ mod tests { &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true, true, true, true, true, true, true, true])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(32u, false); + act = Bitv::from_elem(32u, false); act.set(3u, true); act.set(17u, true); act.set(30u, true); @@ -1917,6 +2062,7 @@ mod tests { &[false, false, false, true, false, false, false, false, false, false, false, false, false, false, false, false, false, true, false, false, false, false, false, false, false, false, false, false, false, false, true, true])); + assert!(!act.none() && !act.all()); } #[test] @@ -1924,21 +2070,23 @@ mod tests { let mut act; // all 0 - act = Bitv::with_capacity(33u, false); + act = Bitv::from_elem(33u, false); assert!(act.eq_vec( &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false])); + assert!(act.none() && !act.all()); // all 1 - act = Bitv::with_capacity(33u, true); + act = Bitv::from_elem(33u, true); assert!(act.eq_vec( &[true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true])); + assert!(!act.none() && act.all()); // mixed - act = Bitv::with_capacity(33u, false); + act = Bitv::from_elem(33u, false); act.set(0u, true); act.set(1u, true); act.set(2u, true); @@ -1951,9 
+2099,10 @@ mod tests { &[true, true, true, true, true, true, true, true, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(33u, false); + act = Bitv::from_elem(33u, false); act.set(16u, true); act.set(17u, true); act.set(18u, true); @@ -1966,9 +2115,10 @@ mod tests { &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true, true, true, true, true, true, true, true, false, false, false, false, false, false, false, false, false])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(33u, false); + act = Bitv::from_elem(33u, false); act.set(24u, true); act.set(25u, true); act.set(26u, true); @@ -1981,9 +2131,10 @@ mod tests { &[false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true, true, true, true, true, true, true, true, false])); + assert!(!act.none() && !act.all()); // mixed - act = Bitv::with_capacity(33u, false); + act = Bitv::from_elem(33u, false); act.set(3u, true); act.set(17u, true); act.set(30u, true); @@ -1993,28 +2144,29 @@ mod tests { &[false, false, false, true, false, false, false, false, false, false, false, false, false, false, false, false, false, true, false, false, false, false, false, false, false, false, false, false, false, false, true, true, true])); + assert!(!act.none() && !act.all()); } #[test] fn test_equal_differing_sizes() { - let v0 = Bitv::with_capacity(10u, false); - let v1 = Bitv::with_capacity(11u, false); + let v0 = Bitv::from_elem(10u, false); + let v1 = Bitv::from_elem(11u, false); assert!(v0 != v1); } #[test] fn test_equal_greatly_differing_sizes() { - let v0 = Bitv::with_capacity(10u, false); - let v1 = Bitv::with_capacity(110u, false); + let v0 = Bitv::from_elem(10u, false); + let v1 = Bitv::from_elem(110u, false); assert!(v0 != v1); } #[test] fn test_equal_sneaky_small() { - let mut a = bitv::Bitv::with_capacity(1, false); + let mut a = Bitv::from_elem(1, false); a.set(0, true); - let mut b = bitv::Bitv::with_capacity(1, true); + let mut b = Bitv::from_elem(1, true); b.set(0, true); assert_eq!(a, b); @@ -2022,12 +2174,12 @@ mod tests { #[test] fn test_equal_sneaky_big() { - let mut a = bitv::Bitv::with_capacity(100, false); + let mut a = Bitv::from_elem(100, false); for i in range(0u, 100) { a.set(i, true); } - let mut b = bitv::Bitv::with_capacity(100, true); + let mut b = Bitv::from_elem(100, true); for i in range(0u, 100) { b.set(i, true); } @@ -2037,18 +2189,18 @@ mod tests { #[test] fn test_from_bytes() { - let bitv = from_bytes(&[0b10110110, 0b00000000, 0b11111111]); - let str = format!("{}{}{}", "10110110", "00000000", "11111111"); + let bitv = Bitv::from_bytes(&[0b10110110, 0b00000000, 0b11111111]); + let str = concat!("10110110", "00000000", "11111111"); assert_eq!(bitv.to_string(), str); } #[test] fn test_to_bytes() { - let mut bv = Bitv::with_capacity(3, true); + let mut bv = Bitv::from_elem(3, true); bv.set(1, false); assert_eq!(bv.to_bytes(), vec!(0b10100000)); - let mut bv = Bitv::with_capacity(9, false); + let mut bv = Bitv::from_elem(9, false); bv.set(2, true); bv.set(8, true); assert_eq!(bv.to_bytes(), vec!(0b00100000, 0b10000000)); @@ -2061,21 +2213,10 @@ mod tests { assert_eq!(bitv.to_string(), "1011"); } - #[test] - fn 
test_bitv_set_from_bools() { - let bools = vec![true, false, true, true]; - let a: BitvSet = bools.iter().map(|n| *n).collect(); - let mut b = BitvSet::new(); - b.insert(0); - b.insert(2); - b.insert(3); - assert_eq!(a, b); - } - #[test] fn test_to_bools() { let bools = vec!(false, false, true, false, false, true, true, false); - assert_eq!(from_bytes(&[0b00100110]).iter().collect::>(), bools); + assert_eq!(Bitv::from_bytes(&[0b00100110]).iter().collect::>(), bools); } #[test] @@ -2090,115 +2231,416 @@ mod tests { assert_eq!(bitv.iter().collect::>(), long) } - #[test] - fn test_bitv_set_iterator() { - let bools = [true, false, true, true]; - let bitv: BitvSet = bools.iter().map(|n| *n).collect(); - - let idxs: Vec = bitv.iter().collect(); - assert_eq!(idxs, vec!(0, 2, 3)); - - let long: BitvSet = range(0u, 10000).map(|n| n % 2 == 0).collect(); - let real = range_step(0, 10000, 2).collect::>(); - - let idxs: Vec = long.iter().collect(); - assert_eq!(idxs, real); - } - - #[test] - fn test_bitv_set_frombitv_init() { - let bools = [true, false]; - let lengths = [10, 64, 100]; - for &b in bools.iter() { - for &l in lengths.iter() { - let bitset = BitvSet::from_bitv(Bitv::with_capacity(l, b)); - assert_eq!(bitset.contains(&1u), b); - assert_eq!(bitset.contains(&(l-1u)), b); - assert!(!bitset.contains(&l)) - } - } - } - #[test] fn test_small_difference() { - let mut b1 = Bitv::with_capacity(3, false); - let mut b2 = Bitv::with_capacity(3, false); + let mut b1 = Bitv::from_elem(3, false); + let mut b2 = Bitv::from_elem(3, false); b1.set(0, true); b1.set(1, true); b2.set(1, true); b2.set(2, true); assert!(b1.difference(&b2)); - assert!(b1.get(0)); - assert!(!b1.get(1)); - assert!(!b1.get(2)); + assert!(b1[0]); + assert!(!b1[1]); + assert!(!b1[2]); } #[test] fn test_big_difference() { - let mut b1 = Bitv::with_capacity(100, false); - let mut b2 = Bitv::with_capacity(100, false); + let mut b1 = Bitv::from_elem(100, false); + let mut b2 = Bitv::from_elem(100, false); b1.set(0, true); b1.set(40, true); b2.set(40, true); b2.set(80, true); assert!(b1.difference(&b2)); - assert!(b1.get(0)); - assert!(!b1.get(40)); - assert!(!b1.get(80)); + assert!(b1[0]); + assert!(!b1[40]); + assert!(!b1[80]); } #[test] fn test_small_clear() { - let mut b = Bitv::with_capacity(14, true); + let mut b = Bitv::from_elem(14, true); + assert!(!b.none() && b.all()); b.clear(); - assert!(b.none()); + assert!(b.none() && !b.all()); } #[test] fn test_big_clear() { - let mut b = Bitv::with_capacity(140, true); + let mut b = Bitv::from_elem(140, true); + assert!(!b.none() && b.all()); b.clear(); - assert!(b.none()); + assert!(b.none() && !b.all()); } #[test] - fn test_bitv_masking() { - let b = Bitv::with_capacity(140, true); - let mut bs = BitvSet::from_bitv(b); - assert!(bs.contains(&139)); - assert!(!bs.contains(&140)); - assert!(bs.insert(150)); - assert!(!bs.contains(&140)); - assert!(!bs.contains(&149)); - assert!(bs.contains(&150)); - assert!(!bs.contains(&151)); + fn test_bitv_lt() { + let mut a = Bitv::from_elem(5u, false); + let mut b = Bitv::from_elem(5u, false); + + assert!(!(a < b) && !(b < a)); + b.set(2, true); + assert!(a < b); + a.set(3, true); + assert!(a < b); + a.set(2, true); + assert!(!(a < b) && b < a); + b.set(0, true); + assert!(a < b); } #[test] - fn test_bitv_set_basic() { - // calculate nbits with u32::BITS granularity - fn calc_nbits(bits: uint) -> uint { - u32::BITS * ((bits + u32::BITS - 1) / u32::BITS) - } + fn test_ord() { + let mut a = Bitv::from_elem(5u, false); + let mut b = Bitv::from_elem(5u, 
false); - let mut b = BitvSet::new(); - assert_eq!(b.capacity(), calc_nbits(0)); - assert!(b.insert(3)); - assert_eq!(b.capacity(), calc_nbits(3)); - assert!(!b.insert(3)); - assert!(b.contains(&3)); - assert!(b.insert(4)); - assert!(!b.insert(4)); - assert!(b.contains(&3)); - assert!(b.insert(400)); - assert_eq!(b.capacity(), calc_nbits(400)); - assert!(!b.insert(400)); - assert!(b.contains(&400)); - assert_eq!(b.len(), 3); + assert!(a <= b && a >= b); + a.set(1, true); + assert!(a > b && a >= b); + assert!(b < a && b <= a); + b.set(1, true); + b.set(2, true); + assert!(b > a && b >= a); + assert!(a < b && a <= b); } + #[test] - fn test_bitv_set_intersection() { + fn test_small_bitv_tests() { + let v = Bitv::from_bytes(&[0]); + assert!(!v.all()); + assert!(!v.any()); + assert!(v.none()); + + let v = Bitv::from_bytes(&[0b00010100]); + assert!(!v.all()); + assert!(v.any()); + assert!(!v.none()); + + let v = Bitv::from_bytes(&[0xFF]); + assert!(v.all()); + assert!(v.any()); + assert!(!v.none()); + } + + #[test] + fn test_big_bitv_tests() { + let v = Bitv::from_bytes(&[ // 88 bits + 0, 0, 0, 0, + 0, 0, 0, 0, + 0, 0, 0]); + assert!(!v.all()); + assert!(!v.any()); + assert!(v.none()); + + let v = Bitv::from_bytes(&[ // 88 bits + 0, 0, 0b00010100, 0, + 0, 0, 0, 0b00110100, + 0, 0, 0]); + assert!(!v.all()); + assert!(v.any()); + assert!(!v.none()); + + let v = Bitv::from_bytes(&[ // 88 bits + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF]); + assert!(v.all()); + assert!(v.any()); + assert!(!v.none()); + } + + #[test] + fn test_bitv_push_pop() { + let mut s = Bitv::from_elem(5 * u32::BITS - 2, false); + assert_eq!(s.len(), 5 * u32::BITS - 2); + assert_eq!(s[5 * u32::BITS - 3], false); + s.push(true); + s.push(true); + assert_eq!(s[5 * u32::BITS - 2], true); + assert_eq!(s[5 * u32::BITS - 1], true); + // Here the internal vector will need to be extended + s.push(false); + assert_eq!(s[5 * u32::BITS], false); + s.push(false); + assert_eq!(s[5 * u32::BITS + 1], false); + assert_eq!(s.len(), 5 * u32::BITS + 2); + // Pop it all off + assert_eq!(s.pop(), Some(false)); + assert_eq!(s.pop(), Some(false)); + assert_eq!(s.pop(), Some(true)); + assert_eq!(s.pop(), Some(true)); + assert_eq!(s.len(), 5 * u32::BITS - 2); + } + + #[test] + fn test_bitv_truncate() { + let mut s = Bitv::from_elem(5 * u32::BITS, true); + + assert_eq!(s, Bitv::from_elem(5 * u32::BITS, true)); + assert_eq!(s.len(), 5 * u32::BITS); + s.truncate(4 * u32::BITS); + assert_eq!(s, Bitv::from_elem(4 * u32::BITS, true)); + assert_eq!(s.len(), 4 * u32::BITS); + // Truncating to a size > s.len() should be a noop + s.truncate(5 * u32::BITS); + assert_eq!(s, Bitv::from_elem(4 * u32::BITS, true)); + assert_eq!(s.len(), 4 * u32::BITS); + s.truncate(3 * u32::BITS - 10); + assert_eq!(s, Bitv::from_elem(3 * u32::BITS - 10, true)); + assert_eq!(s.len(), 3 * u32::BITS - 10); + s.truncate(0); + assert_eq!(s, Bitv::from_elem(0, true)); + assert_eq!(s.len(), 0); + } + + #[test] + fn test_bitv_reserve() { + let mut s = Bitv::from_elem(5 * u32::BITS, true); + // Check capacity + assert!(s.capacity() >= 5 * u32::BITS); + s.reserve(2 * u32::BITS); + assert!(s.capacity() >= 7 * u32::BITS); + s.reserve(7 * u32::BITS); + assert!(s.capacity() >= 12 * u32::BITS); + s.reserve_exact(7 * u32::BITS); + assert!(s.capacity() >= 12 * u32::BITS); + s.reserve(7 * u32::BITS + 1); + assert!(s.capacity() >= 12 * u32::BITS + 1); + // Check that length hasn't changed + assert_eq!(s.len(), 5 * u32::BITS); + s.push(true); + s.push(false); + s.push(true); + 
assert_eq!(s[5 * u32::BITS - 1], true); + assert_eq!(s[5 * u32::BITS - 0], true); + assert_eq!(s[5 * u32::BITS + 1], false); + assert_eq!(s[5 * u32::BITS + 2], true); + } + + #[test] + fn test_bitv_grow() { + let mut bitv = Bitv::from_bytes(&[0b10110110, 0b00000000, 0b10101010]); + bitv.grow(32, true); + assert_eq!(bitv, Bitv::from_bytes(&[0b10110110, 0b00000000, 0b10101010, + 0xFF, 0xFF, 0xFF, 0xFF])); + bitv.grow(64, false); + assert_eq!(bitv, Bitv::from_bytes(&[0b10110110, 0b00000000, 0b10101010, + 0xFF, 0xFF, 0xFF, 0xFF, 0, 0, 0, 0, 0, 0, 0, 0])); + bitv.grow(16, true); + assert_eq!(bitv, Bitv::from_bytes(&[0b10110110, 0b00000000, 0b10101010, + 0xFF, 0xFF, 0xFF, 0xFF, 0, 0, 0, 0, 0, 0, 0, 0, 0xFF, 0xFF])); + } + + #[test] + fn test_bitv_extend() { + let mut bitv = Bitv::from_bytes(&[0b10110110, 0b00000000, 0b11111111]); + let ext = Bitv::from_bytes(&[0b01001001, 0b10010010, 0b10111101]); + bitv.extend(ext.iter()); + assert_eq!(bitv, Bitv::from_bytes(&[0b10110110, 0b00000000, 0b11111111, + 0b01001001, 0b10010010, 0b10111101])); + } +} + + + + +#[cfg(test)] +mod bitv_bench { + use std::prelude::*; + use std::rand; + use std::rand::Rng; + use std::u32; + use test::{Bencher, black_box}; + + use super::Bitv; + + static BENCH_BITS : uint = 1 << 14; + + fn rng() -> rand::IsaacRng { + let seed: &[_] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 0]; + rand::SeedableRng::from_seed(seed) + } + + #[bench] + fn bench_uint_small(b: &mut Bencher) { + let mut r = rng(); + let mut bitv = 0 as uint; + b.iter(|| { + for _ in range(0u, 100) { + bitv |= 1 << ((r.next_u32() as uint) % u32::BITS); + } + black_box(&bitv) + }); + } + + #[bench] + fn bench_bitv_set_big_fixed(b: &mut Bencher) { + let mut r = rng(); + let mut bitv = Bitv::from_elem(BENCH_BITS, false); + b.iter(|| { + for _ in range(0u, 100) { + bitv.set((r.next_u32() as uint) % BENCH_BITS, true); + } + black_box(&bitv) + }); + } + + #[bench] + fn bench_bitv_set_big_variable(b: &mut Bencher) { + let mut r = rng(); + let mut bitv = Bitv::from_elem(BENCH_BITS, false); + b.iter(|| { + for _ in range(0u, 100) { + bitv.set((r.next_u32() as uint) % BENCH_BITS, r.gen()); + } + black_box(&bitv); + }); + } + + #[bench] + fn bench_bitv_set_small(b: &mut Bencher) { + let mut r = rng(); + let mut bitv = Bitv::from_elem(u32::BITS, false); + b.iter(|| { + for _ in range(0u, 100) { + bitv.set((r.next_u32() as uint) % u32::BITS, true); + } + black_box(&bitv); + }); + } + + #[bench] + fn bench_bitv_big_union(b: &mut Bencher) { + let mut b1 = Bitv::from_elem(BENCH_BITS, false); + let b2 = Bitv::from_elem(BENCH_BITS, false); + b.iter(|| { + b1.union(&b2) + }) + } + + #[bench] + fn bench_bitv_small_iter(b: &mut Bencher) { + let bitv = Bitv::from_elem(u32::BITS, false); + b.iter(|| { + let mut sum = 0u; + for _ in range(0u, 10) { + for pres in bitv.iter() { + sum += pres as uint; + } + } + sum + }) + } + + #[bench] + fn bench_bitv_big_iter(b: &mut Bencher) { + let bitv = Bitv::from_elem(BENCH_BITS, false); + b.iter(|| { + let mut sum = 0u; + for pres in bitv.iter() { + sum += pres as uint; + } + sum + }) + } +} + + + + + + + +#[cfg(test)] +mod bitv_set_test { + use prelude::*; + use std::iter::range_step; + + use super::{Bitv, BitvSet}; + + #[test] + fn test_bitv_set_show() { + let mut s = BitvSet::new(); + s.insert(1); + s.insert(10); + s.insert(50); + s.insert(2); + assert_eq!("{1, 2, 10, 50}", s.to_string()); + } + + #[test] + fn test_bitv_set_from_uints() { + let uints = vec![0, 2, 2, 3]; + let a: BitvSet = uints.into_iter().collect(); + let mut b = BitvSet::new(); + 
b.insert(0); + b.insert(2); + b.insert(3); + assert_eq!(a, b); + } + + #[test] + fn test_bitv_set_iterator() { + let uints = vec![0, 2, 2, 3]; + let bitv: BitvSet = uints.into_iter().collect(); + + let idxs: Vec = bitv.iter().collect(); + assert_eq!(idxs, vec![0, 2, 3]); + + let long: BitvSet = range(0u, 10000).filter(|&n| n % 2 == 0).collect(); + let real = range_step(0, 10000, 2).collect::>(); + + let idxs: Vec = long.iter().collect(); + assert_eq!(idxs, real); + } + + #[test] + fn test_bitv_set_frombitv_init() { + let bools = [true, false]; + let lengths = [10, 64, 100]; + for &b in bools.iter() { + for &l in lengths.iter() { + let bitset = BitvSet::from_bitv(Bitv::from_elem(l, b)); + assert_eq!(bitset.contains(&1u), b); + assert_eq!(bitset.contains(&(l-1u)), b); + assert!(!bitset.contains(&l)); + } + } + } + + #[test] + fn test_bitv_masking() { + let b = Bitv::from_elem(140, true); + let mut bs = BitvSet::from_bitv(b); + assert!(bs.contains(&139)); + assert!(!bs.contains(&140)); + assert!(bs.insert(150)); + assert!(!bs.contains(&140)); + assert!(!bs.contains(&149)); + assert!(bs.contains(&150)); + assert!(!bs.contains(&151)); + } + + #[test] + fn test_bitv_set_basic() { + let mut b = BitvSet::new(); + assert!(b.insert(3)); + assert!(!b.insert(3)); + assert!(b.contains(&3)); + assert!(b.insert(4)); + assert!(!b.insert(4)); + assert!(b.contains(&3)); + assert!(b.insert(400)); + assert!(!b.insert(400)); + assert!(b.contains(&400)); + assert_eq!(b.len(), 3); + } + + #[test] + fn test_bitv_set_intersection() { let mut a = BitvSet::new(); let mut b = BitvSet::new(); @@ -2313,10 +2755,10 @@ mod tests { #[test] fn test_bitv_set_is_disjoint() { - let a = BitvSet::from_bitv(from_bytes(&[0b10100010])); - let b = BitvSet::from_bitv(from_bytes(&[0b01000000])); + let a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); + let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b01000000])); let c = BitvSet::new(); - let d = BitvSet::from_bitv(from_bytes(&[0b00110000])); + let d = BitvSet::from_bitv(Bitv::from_bytes(&[0b00110000])); assert!(!a.is_disjoint(&d)); assert!(!d.is_disjoint(&a)); @@ -2336,13 +2778,13 @@ mod tests { a.insert(0); let mut b = BitvSet::new(); b.insert(5); - let expected = BitvSet::from_bitv(from_bytes(&[0b10000100])); + let expected = BitvSet::from_bitv(Bitv::from_bytes(&[0b10000100])); a.union_with(&b); assert_eq!(a, expected); // Standard - let mut a = BitvSet::from_bitv(from_bytes(&[0b10100010])); - let mut b = BitvSet::from_bitv(from_bytes(&[0b01100010])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); + let mut b = BitvSet::from_bitv(Bitv::from_bytes(&[0b01100010])); let c = a.clone(); a.union_with(&b); b.union_with(&c); @@ -2353,8 +2795,8 @@ mod tests { #[test] fn test_bitv_set_intersect_with() { // Explicitly 0'ed bits - let mut a = BitvSet::from_bitv(from_bytes(&[0b10100010])); - let mut b = BitvSet::from_bitv(from_bytes(&[0b00000000])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); + let mut b = BitvSet::from_bitv(Bitv::from_bytes(&[0b00000000])); let c = a.clone(); a.intersect_with(&b); b.intersect_with(&c); @@ -2362,7 +2804,7 @@ mod tests { assert!(b.is_empty()); // Uninitialized bits should behave like 0's - let mut a = BitvSet::from_bitv(from_bytes(&[0b10100010])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); let mut b = BitvSet::new(); let c = a.clone(); a.intersect_with(&b); @@ -2371,8 +2813,8 @@ mod tests { assert!(b.is_empty()); // Standard - let mut a = BitvSet::from_bitv(from_bytes(&[0b10100010])); - 
let mut b = BitvSet::from_bitv(from_bytes(&[0b01100010])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); + let mut b = BitvSet::from_bitv(Bitv::from_bytes(&[0b01100010])); let c = a.clone(); a.intersect_with(&b); b.intersect_with(&c); @@ -2383,20 +2825,20 @@ mod tests { #[test] fn test_bitv_set_difference_with() { // Explicitly 0'ed bits - let mut a = BitvSet::from_bitv(from_bytes(&[0b00000000])); - let b = BitvSet::from_bitv(from_bytes(&[0b10100010])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b00000000])); + let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); a.difference_with(&b); assert!(a.is_empty()); // Uninitialized bits should behave like 0's let mut a = BitvSet::new(); - let b = BitvSet::from_bitv(from_bytes(&[0b11111111])); + let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b11111111])); a.difference_with(&b); assert!(a.is_empty()); // Standard - let mut a = BitvSet::from_bitv(from_bytes(&[0b10100010])); - let mut b = BitvSet::from_bitv(from_bytes(&[0b01100010])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); + let mut b = BitvSet::from_bitv(Bitv::from_bytes(&[0b01100010])); let c = a.clone(); a.difference_with(&b); b.difference_with(&c); @@ -2413,19 +2855,19 @@ mod tests { let mut b = BitvSet::new(); b.insert(1); b.insert(5); - let expected = BitvSet::from_bitv(from_bytes(&[0b10000100])); + let expected = BitvSet::from_bitv(Bitv::from_bytes(&[0b10000100])); a.symmetric_difference_with(&b); assert_eq!(a, expected); - let mut a = BitvSet::from_bitv(from_bytes(&[0b10100010])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); let b = BitvSet::new(); let c = a.clone(); a.symmetric_difference_with(&b); assert_eq!(a, c); // Standard - let mut a = BitvSet::from_bitv(from_bytes(&[0b11100010])); - let mut b = BitvSet::from_bitv(from_bytes(&[0b01101010])); + let mut a = BitvSet::from_bitv(Bitv::from_bytes(&[0b11100010])); + let mut b = BitvSet::from_bitv(Bitv::from_bytes(&[0b01101010])); let c = a.clone(); a.symmetric_difference_with(&b); b.symmetric_difference_with(&c); @@ -2435,8 +2877,8 @@ mod tests { #[test] fn test_bitv_set_eq() { - let a = BitvSet::from_bitv(from_bytes(&[0b10100010])); - let b = BitvSet::from_bitv(from_bytes(&[0b00000000])); + let a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); + let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b00000000])); let c = BitvSet::new(); assert!(a == a); @@ -2449,8 +2891,8 @@ mod tests { #[test] fn test_bitv_set_cmp() { - let a = BitvSet::from_bitv(from_bytes(&[0b10100010])); - let b = BitvSet::from_bitv(from_bytes(&[0b00000000])); + let a = BitvSet::from_bitv(Bitv::from_bytes(&[0b10100010])); + let b = BitvSet::from_bitv(Bitv::from_bytes(&[0b00000000])); let c = BitvSet::new(); assert_eq!(a.cmp(&b), Greater); @@ -2474,38 +2916,6 @@ mod tests { assert!(a.insert(1000)); assert!(a.remove(&1000)); a.shrink_to_fit(); - assert_eq!(a.capacity(), u32::BITS); - } - - #[test] - fn test_bitv_lt() { - let mut a = Bitv::with_capacity(5u, false); - let mut b = Bitv::with_capacity(5u, false); - - assert!(!(a < b) && !(b < a)); - b.set(2, true); - assert!(a < b); - a.set(3, true); - assert!(a < b); - a.set(2, true); - assert!(!(a < b) && b < a); - b.set(0, true); - assert!(a < b); - } - - #[test] - fn test_ord() { - let mut a = Bitv::with_capacity(5u, false); - let mut b = Bitv::with_capacity(5u, false); - - assert!(a <= b && a >= b); - a.set(1, true); - assert!(a > b && a >= b); - assert!(b < a && b <= a); - b.set(1, true); - b.set(2, true); - assert!(b > a && b >= a); 
- assert!(a < b && a <= b); } #[test] @@ -2526,206 +2936,29 @@ mod tests { assert!(a.remove(&1000)); assert!(b.contains(&1000)); } +} - #[test] - fn test_small_bitv_tests() { - let v = from_bytes(&[0]); - assert!(!v.all()); - assert!(!v.any()); - assert!(v.none()); - - let v = from_bytes(&[0b00010100]); - assert!(!v.all()); - assert!(v.any()); - assert!(!v.none()); - - let v = from_bytes(&[0xFF]); - assert!(v.all()); - assert!(v.any()); - assert!(!v.none()); - } - - #[test] - fn test_big_bitv_tests() { - let v = from_bytes(&[ // 88 bits - 0, 0, 0, 0, - 0, 0, 0, 0, - 0, 0, 0]); - assert!(!v.all()); - assert!(!v.any()); - assert!(v.none()); - - let v = from_bytes(&[ // 88 bits - 0, 0, 0b00010100, 0, - 0, 0, 0, 0b00110100, - 0, 0, 0]); - assert!(!v.all()); - assert!(v.any()); - assert!(!v.none()); - - let v = from_bytes(&[ // 88 bits - 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF]); - assert!(v.all()); - assert!(v.any()); - assert!(!v.none()); - } - #[test] - fn test_bitv_push_pop() { - let mut s = Bitv::with_capacity(5 * u32::BITS - 2, false); - assert_eq!(s.len(), 5 * u32::BITS - 2); - assert_eq!(s.get(5 * u32::BITS - 3), false); - s.push(true); - s.push(true); - assert_eq!(s.get(5 * u32::BITS - 2), true); - assert_eq!(s.get(5 * u32::BITS - 1), true); - // Here the internal vector will need to be extended - s.push(false); - assert_eq!(s.get(5 * u32::BITS), false); - s.push(false); - assert_eq!(s.get(5 * u32::BITS + 1), false); - assert_eq!(s.len(), 5 * u32::BITS + 2); - // Pop it all off - assert_eq!(s.pop(), false); - assert_eq!(s.pop(), false); - assert_eq!(s.pop(), true); - assert_eq!(s.pop(), true); - assert_eq!(s.len(), 5 * u32::BITS - 2); - } - #[test] - fn test_bitv_truncate() { - let mut s = Bitv::with_capacity(5 * u32::BITS, true); - assert_eq!(s, Bitv::with_capacity(5 * u32::BITS, true)); - assert_eq!(s.len(), 5 * u32::BITS); - s.truncate(4 * u32::BITS); - assert_eq!(s, Bitv::with_capacity(4 * u32::BITS, true)); - assert_eq!(s.len(), 4 * u32::BITS); - // Truncating to a size > s.len() should be a noop - s.truncate(5 * u32::BITS); - assert_eq!(s, Bitv::with_capacity(4 * u32::BITS, true)); - assert_eq!(s.len(), 4 * u32::BITS); - s.truncate(3 * u32::BITS - 10); - assert_eq!(s, Bitv::with_capacity(3 * u32::BITS - 10, true)); - assert_eq!(s.len(), 3 * u32::BITS - 10); - s.truncate(0); - assert_eq!(s, Bitv::with_capacity(0, true)); - assert_eq!(s.len(), 0); - } - #[test] - fn test_bitv_reserve() { - let mut s = Bitv::with_capacity(5 * u32::BITS, true); - // Check capacity - assert_eq!(s.capacity(), 5 * u32::BITS); - s.reserve(2 * u32::BITS); - assert_eq!(s.capacity(), 5 * u32::BITS); - s.reserve(7 * u32::BITS); - assert_eq!(s.capacity(), 7 * u32::BITS); - s.reserve(7 * u32::BITS); - assert_eq!(s.capacity(), 7 * u32::BITS); - s.reserve(7 * u32::BITS + 1); - assert_eq!(s.capacity(), 8 * u32::BITS); - // Check that length hasn't changed - assert_eq!(s.len(), 5 * u32::BITS); - s.push(true); - s.push(false); - s.push(true); - assert_eq!(s.get(5 * u32::BITS - 1), true); - assert_eq!(s.get(5 * u32::BITS - 0), true); - assert_eq!(s.get(5 * u32::BITS + 1), false); - assert_eq!(s.get(5 * u32::BITS + 2), true); - } - - #[test] - fn test_bitv_grow() { - let mut bitv = from_bytes(&[0b10110110, 0b00000000, 0b10101010]); - bitv.grow(32, true); - assert_eq!(bitv, from_bytes(&[0b10110110, 0b00000000, 0b10101010, - 0xFF, 0xFF, 0xFF, 0xFF])); - bitv.grow(64, false); - assert_eq!(bitv, from_bytes(&[0b10110110, 0b00000000, 0b10101010, - 0xFF, 0xFF, 0xFF, 0xFF, 0, 0, 0, 0, 0, 0, 0, 
0])); - bitv.grow(16, true); - assert_eq!(bitv, from_bytes(&[0b10110110, 0b00000000, 0b10101010, - 0xFF, 0xFF, 0xFF, 0xFF, 0, 0, 0, 0, 0, 0, 0, 0, 0xFF, 0xFF])); - } +#[cfg(test)] +mod bitv_set_bench { + use std::prelude::*; + use std::rand; + use std::rand::Rng; + use std::u32; + use test::{Bencher, black_box}; - #[test] - fn test_bitv_extend() { - let mut bitv = from_bytes(&[0b10110110, 0b00000000, 0b11111111]); - let ext = from_bytes(&[0b01001001, 0b10010010, 0b10111101]); - bitv.extend(ext.iter()); - assert_eq!(bitv, from_bytes(&[0b10110110, 0b00000000, 0b11111111, - 0b01001001, 0b10010010, 0b10111101])); - } + use super::{Bitv, BitvSet}; - #[test] - fn test_bitv_set_show() { - let mut s = BitvSet::new(); - s.insert(1); - s.insert(10); - s.insert(50); - s.insert(2); - assert_eq!("{1, 2, 10, 50}", s.to_string()); - } + static BENCH_BITS : uint = 1 << 14; fn rng() -> rand::IsaacRng { let seed: &[_] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 0]; rand::SeedableRng::from_seed(seed) } - #[bench] - fn bench_uint_small(b: &mut Bencher) { - let mut r = rng(); - let mut bitv = 0 as uint; - b.iter(|| { - for _ in range(0u, 100) { - bitv |= 1 << ((r.next_u32() as uint) % u32::BITS); - } - black_box(&bitv) - }); - } - - #[bench] - fn bench_bitv_set_big_fixed(b: &mut Bencher) { - let mut r = rng(); - let mut bitv = Bitv::with_capacity(BENCH_BITS, false); - b.iter(|| { - for _ in range(0u, 100) { - bitv.set((r.next_u32() as uint) % BENCH_BITS, true); - } - black_box(&bitv) - }); - } - - #[bench] - fn bench_bitv_set_big_variable(b: &mut Bencher) { - let mut r = rng(); - let mut bitv = Bitv::with_capacity(BENCH_BITS, false); - b.iter(|| { - for _ in range(0u, 100) { - bitv.set((r.next_u32() as uint) % BENCH_BITS, r.gen()); - } - black_box(&bitv); - }); - } - - #[bench] - fn bench_bitv_set_small(b: &mut Bencher) { - let mut r = rng(); - let mut bitv = Bitv::with_capacity(u32::BITS, false); - b.iter(|| { - for _ in range(0u, 100) { - bitv.set((r.next_u32() as uint) % u32::BITS, true); - } - black_box(&bitv); - }); - } - #[bench] fn bench_bitvset_small(b: &mut Bencher) { let mut r = rng(); @@ -2750,44 +2983,9 @@ mod tests { }); } - #[bench] - fn bench_bitv_big_union(b: &mut Bencher) { - let mut b1 = Bitv::with_capacity(BENCH_BITS, false); - let b2 = Bitv::with_capacity(BENCH_BITS, false); - b.iter(|| { - b1.union(&b2) - }) - } - - #[bench] - fn bench_bitv_small_iter(b: &mut Bencher) { - let bitv = Bitv::with_capacity(u32::BITS, false); - b.iter(|| { - let mut sum = 0u; - for _ in range(0u, 10) { - for pres in bitv.iter() { - sum += pres as uint; - } - } - sum - }) - } - - #[bench] - fn bench_bitv_big_iter(b: &mut Bencher) { - let bitv = Bitv::with_capacity(BENCH_BITS, false); - b.iter(|| { - let mut sum = 0u; - for pres in bitv.iter() { - sum += pres as uint; - } - sum - }) - } - #[bench] fn bench_bitvset_iter(b: &mut Bencher) { - let bitv = BitvSet::from_bitv(from_fn(BENCH_BITS, + let bitv = BitvSet::from_bitv(Bitv::from_fn(BENCH_BITS, |idx| {idx % 3 == 0})); b.iter(|| { let mut sum = 0u; diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs index c7cbb5a1c299e..65c644da3d892 100644 --- a/src/libcollections/btree/map.rs +++ b/src/libcollections/btree/map.rs @@ -88,7 +88,7 @@ pub struct BTreeMap { } /// An abstract base over-which all other BTree iterators are built. -struct AbsEntries { +struct AbsIter { lca: T, left: RingBuf, right: RingBuf, @@ -96,28 +96,28 @@ struct AbsEntries { } /// An iterator over a BTreeMap's entries. 
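The `btree/map.rs` hunk that begins here is almost entirely a rename: the private `AbsEntries` driver becomes `AbsIter`, and the public wrappers become `Iter`, `IterMut`, and `IntoIter`, matching the names used across the other collections. A compressed sketch of that wrapper shape in current syntax (the diff itself still uses the older `Iterator<A>` trait form, and the inner iterator here is just a `Vec` iterator, not the real B-tree traversal):

```rust
// Private driver that the public iterators delegate to.
struct AbsIter<T> {
    inner: T,
}

impl<T: Iterator> Iterator for AbsIter<T> {
    type Item = T::Item;
    fn next(&mut self) -> Option<T::Item> {
        self.inner.next()
    }
}

// Thin public newtype, as in the Iter / IterMut / IntoIter wrappers.
pub struct IntoIter {
    inner: AbsIter<std::vec::IntoIter<(u32, char)>>,
}

impl Iterator for IntoIter {
    type Item = (u32, char);
    fn next(&mut self) -> Option<(u32, char)> {
        self.inner.next()
    }
}

fn main() {
    let it = IntoIter {
        inner: AbsIter { inner: vec![(1, 'a'), (2, 'b')].into_iter() },
    };
    assert_eq!(it.collect::<Vec<_>>(), vec![(1, 'a'), (2, 'b')]);
}
```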
-pub struct Entries<'a, K: 'a, V: 'a> { - inner: AbsEntries> +pub struct Iter<'a, K: 'a, V: 'a> { + inner: AbsIter> } /// A mutable iterator over a BTreeMap's entries. -pub struct MutEntries<'a, K: 'a, V: 'a> { - inner: AbsEntries> +pub struct IterMut<'a, K: 'a, V: 'a> { + inner: AbsIter> } /// An owning iterator over a BTreeMap's entries. -pub struct MoveEntries { - inner: AbsEntries> +pub struct IntoIter { + inner: AbsIter> } /// An iterator over a BTreeMap's keys. pub struct Keys<'a, K: 'a, V: 'a> { - inner: Map<(&'a K, &'a V), &'a K, Entries<'a, K, V>, fn((&'a K, &'a V)) -> &'a K> + inner: Map<(&'a K, &'a V), &'a K, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a K> } /// An iterator over a BTreeMap's values. pub struct Values<'a, K: 'a, V: 'a> { - inner: Map<(&'a K, &'a V), &'a V, Entries<'a, K, V>, fn((&'a K, &'a V)) -> &'a V> + inner: Map<(&'a K, &'a V), &'a V, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a V> } /// A view into a single entry in a map, which may either be vacant or occupied. @@ -131,12 +131,12 @@ pub enum Entry<'a, K:'a, V:'a> { /// A vacant Entry. pub struct VacantEntry<'a, K:'a, V:'a> { key: K, - stack: stack::SearchStack<'a, K, V, node::Edge, node::Leaf>, + stack: stack::SearchStack<'a, K, V, node::handle::Edge, node::handle::Leaf>, } /// An occupied Entry. pub struct OccupiedEntry<'a, K:'a, V:'a> { - stack: stack::SearchStack<'a, K, V, node::KV, node::LeafOrInternal>, + stack: stack::SearchStack<'a, K, V, node::handle::KV, node::handle::LeafOrInternal>, } impl BTreeMap { @@ -496,7 +496,8 @@ mod stack { use core::kinds::marker; use core::mem; use super::BTreeMap; - use super::super::node::{mod, Node, Fit, Split, KV, Edge, Internal, Leaf, LeafOrInternal}; + use super::super::node::{mod, Node, Fit, Split, Internal, Leaf}; + use super::super::node::handle; use vec::Vec; /// A generic mutable reference, identical to `&mut` except for the fact that its lifetime @@ -520,7 +521,7 @@ mod stack { } } - type StackItem = node::Handle<*mut Node, Edge, Internal>; + type StackItem = node::Handle<*mut Node, handle::Edge, handle::Internal>; type Stack = Vec>; /// A `PartialSearchStack` handles the construction of a search stack. @@ -595,7 +596,9 @@ mod stack { /// Pushes the requested child of the stack's current top on top of the stack. If the child /// exists, then a new PartialSearchStack is yielded. Otherwise, a VacantSearchStack is /// yielded. - pub fn push(mut self, mut edge: node::Handle>, Edge, Internal>) + pub fn push(mut self, mut edge: node::Handle>, + handle::Edge, + handle::Internal>) -> PartialSearchStack<'a, K, V> { self.stack.push(edge.as_raw()); PartialSearchStack { @@ -617,7 +620,7 @@ mod stack { } } - impl<'a, K, V, NodeType> SearchStack<'a, K, V, KV, NodeType> { + impl<'a, K, V, NodeType> SearchStack<'a, K, V, handle::KV, NodeType> { /// Gets a reference to the value the stack points to. pub fn peek(&self) -> &V { unsafe { self.top.from_raw().into_kv().1 } @@ -640,7 +643,7 @@ mod stack { } } - impl<'a, K, V> SearchStack<'a, K, V, KV, Leaf> { + impl<'a, K, V> SearchStack<'a, K, V, handle::KV, handle::Leaf> { /// Removes the key and value in the top element of the stack, then handles underflows as /// described in BTree's pop function. fn remove_leaf(mut self) -> V { @@ -686,7 +689,7 @@ mod stack { } } - impl<'a, K, V> SearchStack<'a, K, V, KV, LeafOrInternal> { + impl<'a, K, V> SearchStack<'a, K, V, handle::KV, handle::LeafOrInternal> { /// Removes the key and value in the top element of the stack, then handles underflows as /// described in BTree's pop function. 
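Further down, the `keys` and `values` hunks gain an explicit `// coerce to fn pointer` line. The reason is visible in the `Keys`/`Values` structs above: they store the `Map` adapter with a plain `fn(..) -> ..` pointer type, while a named function is, by itself, a zero-sized fn *item* type; the `let` with a pointer-typed annotation forces the coercion so the field type matches. A small sketch of the same trick in current syntax (`std::iter::Map` here is the modern two-parameter form, and all concrete types are illustrative):

```rust
fn first<A, B>((a, _): (A, B)) -> A {
    a
}

// Stores the adapter with an explicit fn-pointer type, like Keys/Values do.
struct Keys<I> {
    inner: std::iter::Map<I, fn((u32, char)) -> u32>,
}

fn keys<I: Iterator<Item = (u32, char)>>(iter: I) -> Keys<I> {
    // Without this line, `first` would keep its own anonymous fn-item type
    // and would not match the fn-pointer type written in the struct field.
    let first: fn((u32, char)) -> u32 = first;
    Keys { inner: iter.map(first) }
}

fn main() {
    let ks: Vec<u32> = keys(vec![(1, 'a'), (2, 'b')].into_iter()).inner.collect();
    assert_eq!(ks, vec![1, 2]);
}
```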
pub fn remove(self) -> V { @@ -703,7 +706,7 @@ mod stack { /// leaves the tree in an inconsistent state that must be repaired by the caller by /// removing the entry in question. Specifically the key-value pair and its successor will /// become swapped. - fn into_leaf(mut self) -> SearchStack<'a, K, V, KV, Leaf> { + fn into_leaf(mut self) -> SearchStack<'a, K, V, handle::KV, handle::Leaf> { unsafe { let mut top_raw = self.top; let mut top = top_raw.from_raw_mut(); @@ -757,7 +760,7 @@ mod stack { } } - impl<'a, K, V> SearchStack<'a, K, V, Edge, Leaf> { + impl<'a, K, V> SearchStack<'a, K, V, handle::Edge, handle::Leaf> { /// Inserts the key and value into the top element in the stack, and if that node has to /// split recursively inserts the split contents into the next element stack until /// splits stop. @@ -926,7 +929,7 @@ enum StackOp { } impl + DoubleEndedIterator>> - Iterator<(K, V)> for AbsEntries { + Iterator<(K, V)> for AbsIter { // This function is pretty long, but only because there's a lot of cases to consider. // Our iterator represents two search paths, left and right, to the smallest and largest // elements we have yet to yield. lca represents the least common ancestor of these two paths, @@ -992,7 +995,7 @@ impl + DoubleEndedIterator>> } impl + DoubleEndedIterator>> - DoubleEndedIterator<(K, V)> for AbsEntries { + DoubleEndedIterator<(K, V)> for AbsIter { // next_back is totally symmetric to next fn next_back(&mut self) -> Option<(K, V)> { loop { @@ -1029,34 +1032,34 @@ impl + DoubleEndedIterator>> } } -impl<'a, K, V> Iterator<(&'a K, &'a V)> for Entries<'a, K, V> { +impl<'a, K, V> Iterator<(&'a K, &'a V)> for Iter<'a, K, V> { fn next(&mut self) -> Option<(&'a K, &'a V)> { self.inner.next() } fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } } -impl<'a, K, V> DoubleEndedIterator<(&'a K, &'a V)> for Entries<'a, K, V> { +impl<'a, K, V> DoubleEndedIterator<(&'a K, &'a V)> for Iter<'a, K, V> { fn next_back(&mut self) -> Option<(&'a K, &'a V)> { self.inner.next_back() } } -impl<'a, K, V> ExactSizeIterator<(&'a K, &'a V)> for Entries<'a, K, V> {} +impl<'a, K, V> ExactSizeIterator<(&'a K, &'a V)> for Iter<'a, K, V> {} -impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for MutEntries<'a, K, V> { +impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for IterMut<'a, K, V> { fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next() } fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } } -impl<'a, K, V> DoubleEndedIterator<(&'a K, &'a mut V)> for MutEntries<'a, K, V> { +impl<'a, K, V> DoubleEndedIterator<(&'a K, &'a mut V)> for IterMut<'a, K, V> { fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next_back() } } -impl<'a, K, V> ExactSizeIterator<(&'a K, &'a mut V)> for MutEntries<'a, K, V> {} +impl<'a, K, V> ExactSizeIterator<(&'a K, &'a mut V)> for IterMut<'a, K, V> {} -impl Iterator<(K, V)> for MoveEntries { +impl Iterator<(K, V)> for IntoIter { fn next(&mut self) -> Option<(K, V)> { self.inner.next() } fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } } -impl DoubleEndedIterator<(K, V)> for MoveEntries { +impl DoubleEndedIterator<(K, V)> for IntoIter { fn next_back(&mut self) -> Option<(K, V)> { self.inner.next_back() } } -impl ExactSizeIterator<(K, V)> for MoveEntries {} +impl ExactSizeIterator<(K, V)> for IntoIter {} impl<'a, K, V> Iterator<&'a K> for Keys<'a, K, V> { @@ -1137,10 +1140,10 @@ impl BTreeMap { /// assert_eq!((*first_key, *first_value), (1u, "a")); /// ``` #[unstable = "matches collection reform specification, 
waiting for dust to settle"] - pub fn iter<'a>(&'a self) -> Entries<'a, K, V> { + pub fn iter<'a>(&'a self) -> Iter<'a, K, V> { let len = self.len(); - Entries { - inner: AbsEntries { + Iter { + inner: AbsIter { lca: Traverse::traverse(&self.root), left: RingBuf::new(), right: RingBuf::new(), @@ -1169,10 +1172,10 @@ impl BTreeMap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter_mut<'a>(&'a mut self) -> MutEntries<'a, K, V> { + pub fn iter_mut<'a>(&'a mut self) -> IterMut<'a, K, V> { let len = self.len(); - MutEntries { - inner: AbsEntries { + IterMut { + inner: AbsIter { lca: Traverse::traverse(&mut self.root), left: RingBuf::new(), right: RingBuf::new(), @@ -1198,10 +1201,10 @@ impl BTreeMap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> MoveEntries { + pub fn into_iter(self) -> IntoIter { let len = self.len(); - MoveEntries { - inner: AbsEntries { + IntoIter { + inner: AbsIter { lca: Traverse::traverse(self.root), left: RingBuf::new(), right: RingBuf::new(), @@ -1227,6 +1230,7 @@ impl BTreeMap { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn keys<'a>(&'a self) -> Keys<'a, K, V> { fn first((a, _): (A, B)) -> A { a } + let first: fn((&'a K, &'a V)) -> &'a K = first; // coerce to fn pointer Keys { inner: self.iter().map(first) } } @@ -1248,6 +1252,7 @@ impl BTreeMap { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn values<'a>(&'a self) -> Values<'a, K, V> { fn second((_, b): (A, B)) -> B { b } + let second: fn((&'a K, &'a V)) -> &'a V = second; // coerce to fn pointer Values { inner: self.iter().map(second) } } @@ -1285,6 +1290,30 @@ impl BTreeMap { impl BTreeMap { /// Gets the given key's corresponding entry in the map for in-place manipulation. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut count: BTreeMap<&str, uint> = BTreeMap::new(); + /// + /// // count the number of occurrences of letters in the vec + /// for x in vec!["a","b","a","c","a","b"].iter() { + /// match count.entry(*x) { + /// Entry::Vacant(view) => { + /// view.set(1); + /// }, + /// Entry::Occupied(mut view) => { + /// let v = view.get_mut(); + /// *v += 1; + /// }, + /// } + /// } + /// + /// assert_eq!(count["a"], 3u); + /// ``` pub fn entry<'a>(&'a mut self, mut key: K) -> Entry<'a, K, V> { // same basic logic of `swap` and `pop`, blended together let mut stack = stack::PartialSearchStack::new(self); @@ -1332,7 +1361,7 @@ impl BTreeMap { #[cfg(test)] mod test { - use std::prelude::*; + use prelude::*; use super::{BTreeMap, Occupied, Vacant}; @@ -1534,7 +1563,7 @@ mod test { #[cfg(test)] mod bench { - use std::prelude::*; + use prelude::*; use std::rand::{weak_rng, Rng}; use test::{Bencher, black_box}; diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs index 9698b06c7fa0f..2c3c546fdb7ff 100644 --- a/src/libcollections/btree/node.rs +++ b/src/libcollections/btree/node.rs @@ -34,9 +34,9 @@ pub enum InsertionResult { /// Represents the result of a search for a key in a single node pub enum SearchResult { /// The element was found at the given index - Found(Handle), + Found(Handle), /// The element wasn't found, but if it's anywhere, it must be beyond this edge - GoDown(Handle), + GoDown(Handle), } /// A B-Tree Node. 
We keep keys/edges/values separate to optimize searching for keys. @@ -390,6 +390,7 @@ impl Node { } // FIXME(gereeter) Write an efficient clone_from +#[stable] impl Clone for Node { fn clone(&self) -> Node { let mut ret = if self.is_leaf() { @@ -494,12 +495,16 @@ pub struct Handle { index: uint } -pub enum KV {} -pub enum Edge {} +pub mod handle { + // Handle types. + pub enum KV {} + pub enum Edge {} -pub enum LeafOrInternal {} -pub enum Leaf {} -pub enum Internal {} + // Handle node types. + pub enum LeafOrInternal {} + pub enum Leaf {} + pub enum Internal {} +} impl Node { /// Searches for the given key in the node. If it finds an exact match, @@ -625,7 +630,7 @@ impl Handle<*mut Node, Type, NodeType> { } } -impl<'a, K: 'a, V: 'a> Handle<&'a Node, Edge, Internal> { +impl<'a, K: 'a, V: 'a> Handle<&'a Node, handle::Edge, handle::Internal> { /// Turns the handle into a reference to the edge it points at. This is necessary because the /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`, /// making it more suitable for moving down a chain of nodes. @@ -636,7 +641,7 @@ impl<'a, K: 'a, V: 'a> Handle<&'a Node, Edge, Internal> { } } -impl<'a, K: 'a, V: 'a> Handle<&'a mut Node, Edge, Internal> { +impl<'a, K: 'a, V: 'a> Handle<&'a mut Node, handle::Edge, handle::Internal> { /// Turns the handle into a mutable reference to the edge it points at. This is necessary /// because the returned pointer has a larger lifetime than what would be returned by /// `edge_mut`, making it more suitable for moving down a chain of nodes. @@ -647,7 +652,7 @@ impl<'a, K: 'a, V: 'a> Handle<&'a mut Node, Edge, Internal> { } } -impl>> Handle { +impl>> Handle { // This doesn't exist because there are no uses for it, // but is fine to add, analagous to edge_mut. // @@ -657,11 +662,11 @@ impl>> Handle { } pub enum ForceResult { - Leaf(Handle), - Internal(Handle) + Leaf(Handle), + Internal(Handle) } -impl>, Type> Handle { +impl>, Type> Handle { /// Figure out whether this handle is pointing to something in a leaf node or to something in /// an internal node, clarifying the type according to the result. pub fn force(self) -> ForceResult { @@ -679,7 +684,7 @@ impl>, Type> Handle>> Handle { +impl>> Handle { /// Tries to insert this key-value pair at the given index in this leaf node /// If the node is full, we have to split it. /// @@ -711,7 +716,7 @@ impl>> Handle { } } -impl>> Handle { +impl>> Handle { /// Returns a mutable reference to the edge pointed-to by this handle. This should not be /// confused with `node`, which references the parent node of what is returned here. pub fn edge_mut(&mut self) -> &mut Node { @@ -794,11 +799,11 @@ impl>> Handle { } } -impl>, NodeType> Handle { +impl>, NodeType> Handle { /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge. /// This is unsafe because the handle might point to the first edge in the node, which has no /// pair to its left. 
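The new `handle` module above groups the uninhabited marker enums (`KV`, `Edge`, `Leaf`, `Internal`, `LeafOrInternal`) that parameterize `Handle`, so the kind of position a handle points at is tracked entirely in the type system and costs nothing at runtime. A reduced sketch of that marker-type pattern, assuming only the tagging mechanism matters (the `Handle` here stores a flat slice, not the patch's node layout):

```rust
use std::marker::PhantomData;

// Uninhabited marker types: never constructed, they only select which
// methods are available on a given Handle.
enum Edge {}
enum Kv {}

struct Handle<'a, Marker> {
    keys: &'a [i32],
    index: usize,
    _kind: PhantomData<Marker>,
}

impl<'a> Handle<'a, Kv> {
    // Only key/value handles expose an element.
    fn key(&self) -> i32 {
        self.keys[self.index]
    }
}

impl<'a> Handle<'a, Edge> {
    // Only edge handles can step to the key/value pair on their left,
    // mirroring the `left_kv` conversion above.
    fn left_kv(self) -> Option<Handle<'a, Kv>> {
        if self.index == 0 {
            None
        } else {
            Some(Handle { keys: self.keys, index: self.index - 1, _kind: PhantomData })
        }
    }
}

fn main() {
    let keys = [10, 20, 30];
    let edge: Handle<'_, Edge> = Handle { keys: &keys, index: 1, _kind: PhantomData };
    let kv = edge.left_kv().expect("edge 1 has a pair to its left");
    assert_eq!(kv.key(), 10);
    // Calling `.key()` on an Edge handle would not compile: that method
    // only exists for Handle<'_, Kv>.
}
```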
- unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node, KV, NodeType> { + unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node, handle::KV, NodeType> { Handle { node: &mut *self.node, index: self.index - 1 @@ -808,7 +813,7 @@ impl>, NodeType> Handle(&'a mut self) -> Handle<&'a mut Node, KV, NodeType> { + unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node, handle::KV, NodeType> { Handle { node: &mut *self.node, index: self.index @@ -816,7 +821,7 @@ impl>, NodeType> Handle Handle<&'a Node, KV, NodeType> { +impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node, handle::KV, NodeType> { /// Turns the handle into references to the key and value it points at. This is necessary /// because the returned pointers have larger lifetimes than what would be returned by `key` /// or `val`. @@ -831,7 +836,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node, KV, NodeType> { } } -impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node, KV, NodeType> { +impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node, handle::KV, NodeType> { /// Turns the handle into mutable references to the key and value it points at. This is /// necessary because the returned pointers have larger lifetimes than what would be returned /// by `key_mut` or `val_mut`. @@ -848,7 +853,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node, KV, NodeType> { /// Convert this handle into one pointing at the edge immediately to the left of the key/value /// pair pointed-to by this handle. This is useful because it returns a reference with larger /// lifetime than `left_edge`. - pub fn into_left_edge(self) -> Handle<&'a mut Node, Edge, NodeType> { + pub fn into_left_edge(self) -> Handle<&'a mut Node, handle::Edge, NodeType> { Handle { node: &mut *self.node, index: self.index @@ -856,7 +861,8 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node, KV, NodeType> { } } -impl<'a, K: 'a, V: 'a, NodeRef: Deref> + 'a, NodeType> Handle { +impl<'a, K: 'a, V: 'a, NodeRef: Deref> + 'a, NodeType> Handle { // These are fine to include, but are currently unneeded. // // /// Returns a reference to the key pointed-to by this handle. This doesn't return a @@ -874,7 +880,8 @@ impl<'a, K: 'a, V: 'a, NodeRef: Deref> + 'a, NodeType> Handle> + 'a, NodeType> Handle { +impl<'a, K: 'a, V: 'a, NodeRef: DerefMut> + 'a, NodeType> Handle { /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the /// handle. @@ -890,10 +897,10 @@ impl<'a, K: 'a, V: 'a, NodeRef: DerefMut> + 'a, NodeType> Handle>, NodeType> Handle { +impl>, NodeType> Handle { /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed /// to by this handle. - pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node, Edge, NodeType> { + pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node, handle::Edge, NodeType> { Handle { node: &mut *self.node, index: self.index @@ -902,7 +909,7 @@ impl>, NodeType> Handle(&'a mut self) -> Handle<&'a mut Node, Edge, NodeType> { + pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node, handle::Edge, NodeType> { Handle { node: &mut *self.node, index: self.index + 1 @@ -910,7 +917,7 @@ impl>, NodeType> Handle>> Handle { +impl>> Handle { /// Removes the key/value pair at the handle's location. /// /// # Panics (in debug build) @@ -921,7 +928,7 @@ impl>> Handle { } } -impl>> Handle { +impl>> Handle { /// Steal! Stealing is roughly analogous to a binary tree rotation. 
/// In this case, we're "rotating" right. unsafe fn steal_rightward(&mut self) { @@ -1004,7 +1011,8 @@ impl Node { /// # Panics (in debug build) /// /// Panics if the given index is out of bounds. - pub fn kv_handle(&mut self, index: uint) -> Handle<&mut Node, KV, LeafOrInternal> { + pub fn kv_handle(&mut self, index: uint) -> Handle<&mut Node, handle::KV, + handle::LeafOrInternal> { // Necessary for correctness, but in a private module debug_assert!(index < self.len(), "kv_handle index out of bounds"); Handle { @@ -1374,14 +1382,14 @@ pub enum TraversalItem { } /// A traversal over a node's entries and edges -pub type Traversal<'a, K, V> = AbsTraversal, - slice::Items<'a, V>>, - slice::Items<'a, Node>>>; +pub type Traversal<'a, K, V> = AbsTraversal, + slice::Iter<'a, V>>, + slice::Iter<'a, Node>>>; /// A mutable traversal over a node's entries and edges -pub type MutTraversal<'a, K, V> = AbsTraversal, - slice::MutItems<'a, V>>, - slice::MutItems<'a, Node>>>; +pub type MutTraversal<'a, K, V> = AbsTraversal, + slice::IterMut<'a, V>>, + slice::IterMut<'a, Node>>>; /// An owning traversal over a node's entries and edges pub type MoveTraversal = AbsTraversal>; diff --git a/src/libcollections/btree/set.rs b/src/libcollections/btree/set.rs index 4ef2e681992ae..2935692ed1580 100644 --- a/src/libcollections/btree/set.rs +++ b/src/libcollections/btree/set.rs @@ -13,7 +13,7 @@ use core::prelude::*; -use btree_map::{BTreeMap, Keys, MoveEntries}; +use btree_map::{BTreeMap, Keys}; use std::hash::Hash; use core::borrow::BorrowFrom; use core::default::Default; @@ -33,37 +33,37 @@ pub struct BTreeSet{ } /// An iterator over a BTreeSet's items. -pub struct Items<'a, T: 'a> { +pub struct Iter<'a, T: 'a> { iter: Keys<'a, T, ()> } /// An owning iterator over a BTreeSet's items. -pub struct MoveItems { - iter: Map<(T, ()), T, MoveEntries, fn((T, ())) -> T> +pub struct IntoIter { + iter: Map<(T, ()), T, ::btree_map::IntoIter, fn((T, ())) -> T> } /// A lazy iterator producing elements in the set difference (in-order). -pub struct DifferenceItems<'a, T:'a> { - a: Peekable<&'a T, Items<'a, T>>, - b: Peekable<&'a T, Items<'a, T>>, +pub struct Difference<'a, T:'a> { + a: Peekable<&'a T, Iter<'a, T>>, + b: Peekable<&'a T, Iter<'a, T>>, } /// A lazy iterator producing elements in the set symmetric difference (in-order). -pub struct SymDifferenceItems<'a, T:'a> { - a: Peekable<&'a T, Items<'a, T>>, - b: Peekable<&'a T, Items<'a, T>>, +pub struct SymmetricDifference<'a, T:'a> { + a: Peekable<&'a T, Iter<'a, T>>, + b: Peekable<&'a T, Iter<'a, T>>, } /// A lazy iterator producing elements in the set intersection (in-order). -pub struct IntersectionItems<'a, T:'a> { - a: Peekable<&'a T, Items<'a, T>>, - b: Peekable<&'a T, Items<'a, T>>, +pub struct Intersection<'a, T:'a> { + a: Peekable<&'a T, Iter<'a, T>>, + b: Peekable<&'a T, Iter<'a, T>>, } /// A lazy iterator producing elements in the set union (in-order). -pub struct UnionItems<'a, T:'a> { - a: Peekable<&'a T, Items<'a, T>>, - b: Peekable<&'a T, Items<'a, T>>, +pub struct Union<'a, T:'a> { + a: Peekable<&'a T, Iter<'a, T>>, + b: Peekable<&'a T, Iter<'a, T>>, } impl BTreeSet { @@ -107,8 +107,8 @@ impl BTreeSet { /// assert_eq!(v, vec![1u,2,3,4]); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter<'a>(&'a self) -> Items<'a, T> { - Items { iter: self.map.keys() } + pub fn iter<'a>(&'a self) -> Iter<'a, T> { + Iter { iter: self.map.keys() } } /// Gets an iterator for moving out the BtreeSet's contents. 
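The "steal" rebalancing described above moves one element between siblings by rotating it through the parent's separator key, much like a binary tree rotation. A simplified model of that key movement on flat `Vec`s, assuming we can ignore child edges entirely (this is purely illustrative and not the patch's data layout):

```rust
// Rotate one key "rightward": the right node gains an element, the parent
// separator moves down into it, and the left sibling's last key moves up.
fn rotate_right(left: &mut Vec<i32>, separator: &mut i32, right: &mut Vec<i32>) {
    // Take the largest key of the left sibling.
    let stolen = left.pop().expect("left sibling must be non-empty");
    // The old separator becomes the smallest key of the right node...
    right.insert(0, *separator);
    // ...and the stolen key becomes the new separator.
    *separator = stolen;
}

fn main() {
    let mut left = vec![1, 3, 5];
    let mut sep = 7;
    let mut right = vec![9];

    rotate_right(&mut left, &mut sep, &mut right);

    assert_eq!(left, [1, 3]);
    assert_eq!(sep, 5);
    assert_eq!(right, [7, 9]);
    // The overall sorted order 1,3,5,7,9 is preserved; only the split moved.
}
```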
@@ -124,10 +124,11 @@ impl BTreeSet { /// assert_eq!(v, vec![1u,2,3,4]); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> MoveItems { + pub fn into_iter(self) -> IntoIter { fn first((a, _): (A, B)) -> A { a } + let first: fn((T, ())) -> T = first; // coerce to fn pointer - MoveItems { iter: self.map.into_iter().map(first) } + IntoIter { iter: self.map.into_iter().map(first) } } } @@ -151,8 +152,8 @@ impl BTreeSet { /// assert_eq!(diff, vec![1u]); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn difference<'a>(&'a self, other: &'a BTreeSet) -> DifferenceItems<'a, T> { - DifferenceItems{a: self.iter().peekable(), b: other.iter().peekable()} + pub fn difference<'a>(&'a self, other: &'a BTreeSet) -> Difference<'a, T> { + Difference{a: self.iter().peekable(), b: other.iter().peekable()} } /// Visits the values representing the symmetric difference, in ascending order. @@ -175,8 +176,8 @@ impl BTreeSet { /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn symmetric_difference<'a>(&'a self, other: &'a BTreeSet) - -> SymDifferenceItems<'a, T> { - SymDifferenceItems{a: self.iter().peekable(), b: other.iter().peekable()} + -> SymmetricDifference<'a, T> { + SymmetricDifference{a: self.iter().peekable(), b: other.iter().peekable()} } /// Visits the values representing the intersection, in ascending order. @@ -199,8 +200,8 @@ impl BTreeSet { /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn intersection<'a>(&'a self, other: &'a BTreeSet) - -> IntersectionItems<'a, T> { - IntersectionItems{a: self.iter().peekable(), b: other.iter().peekable()} + -> Intersection<'a, T> { + Intersection{a: self.iter().peekable(), b: other.iter().peekable()} } /// Visits the values representing the union, in ascending order. @@ -220,8 +221,8 @@ impl BTreeSet { /// assert_eq!(union, vec![1u,2]); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn union<'a>(&'a self, other: &'a BTreeSet) -> UnionItems<'a, T> { - UnionItems{a: self.iter().peekable(), b: other.iter().peekable()} + pub fn union<'a>(&'a self, other: &'a BTreeSet) -> Union<'a, T> { + Union{a: self.iter().peekable(), b: other.iter().peekable()} } /// Return the number of elements in the set @@ -448,30 +449,6 @@ impl Default for BTreeSet { } #[unstable = "matches collection reform specification, waiting for dust to settle"] -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Sub,BTreeSet> for BTreeSet { - /// Returns the difference of `self` and `rhs` as a new `BTreeSet`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeSet; - /// - /// let a: BTreeSet = vec![1,2,3].into_iter().collect(); - /// let b: BTreeSet = vec![3,4,5].into_iter().collect(); - /// - /// let result: BTreeSet = a - b; - /// let result_vec: Vec = result.into_iter().collect(); - /// assert_eq!(result_vec, vec![1,2]); - /// ``` - fn sub(&self, rhs: &BTreeSet) -> BTreeSet { - self.difference(rhs).cloned().collect() - } -} - -#[unstable = "matches collection reform specification, waiting for dust to settle"] -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl<'a, 'b, T: Ord + Clone> Sub<&'b BTreeSet, BTreeSet> for &'a BTreeSet { /// Returns the difference of `self` and `rhs` as a new `BTreeSet`. 
/// @@ -493,30 +470,6 @@ impl<'a, 'b, T: Ord + Clone> Sub<&'b BTreeSet, BTreeSet> for &'a BTreeSet< } #[unstable = "matches collection reform specification, waiting for dust to settle"] -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl BitXor,BTreeSet> for BTreeSet { - /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeSet; - /// - /// let a: BTreeSet = vec![1,2,3].into_iter().collect(); - /// let b: BTreeSet = vec![2,3,4].into_iter().collect(); - /// - /// let result: BTreeSet = a ^ b; - /// let result_vec: Vec = result.into_iter().collect(); - /// assert_eq!(result_vec, vec![1,4]); - /// ``` - fn bitxor(&self, rhs: &BTreeSet) -> BTreeSet { - self.symmetric_difference(rhs).cloned().collect() - } -} - -#[unstable = "matches collection reform specification, waiting for dust to settle"] -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl<'a, 'b, T: Ord + Clone> BitXor<&'b BTreeSet, BTreeSet> for &'a BTreeSet { /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet`. /// @@ -538,30 +491,6 @@ impl<'a, 'b, T: Ord + Clone> BitXor<&'b BTreeSet, BTreeSet> for &'a BTreeS } #[unstable = "matches collection reform specification, waiting for dust to settle"] -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl BitAnd,BTreeSet> for BTreeSet { - /// Returns the intersection of `self` and `rhs` as a new `BTreeSet`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeSet; - /// - /// let a: BTreeSet = vec![1,2,3].into_iter().collect(); - /// let b: BTreeSet = vec![2,3,4].into_iter().collect(); - /// - /// let result: BTreeSet = a & b; - /// let result_vec: Vec = result.into_iter().collect(); - /// assert_eq!(result_vec, vec![2,3]); - /// ``` - fn bitand(&self, rhs: &BTreeSet) -> BTreeSet { - self.intersection(rhs).cloned().collect() - } -} - -#[unstable = "matches collection reform specification, waiting for dust to settle"] -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl<'a, 'b, T: Ord + Clone> BitAnd<&'b BTreeSet, BTreeSet> for &'a BTreeSet { /// Returns the intersection of `self` and `rhs` as a new `BTreeSet`. /// @@ -583,30 +512,6 @@ impl<'a, 'b, T: Ord + Clone> BitAnd<&'b BTreeSet, BTreeSet> for &'a BTreeS } #[unstable = "matches collection reform specification, waiting for dust to settle"] -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl BitOr,BTreeSet> for BTreeSet { - /// Returns the union of `self` and `rhs` as a new `BTreeSet`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeSet; - /// - /// let a: BTreeSet = vec![1,2,3].into_iter().collect(); - /// let b: BTreeSet = vec![3,4,5].into_iter().collect(); - /// - /// let result: BTreeSet = a | b; - /// let result_vec: Vec = result.into_iter().collect(); - /// assert_eq!(result_vec, vec![1,2,3,4,5]); - /// ``` - fn bitor(&self, rhs: &BTreeSet) -> BTreeSet { - self.union(rhs).cloned().collect() - } -} - -#[unstable = "matches collection reform specification, waiting for dust to settle"] -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl<'a, 'b, T: Ord + Clone> BitOr<&'b BTreeSet, BTreeSet> for &'a BTreeSet { /// Returns the union of `self` and `rhs` as a new `BTreeSet`. 
/// @@ -640,24 +545,24 @@ impl Show for BTreeSet { } } -impl<'a, T> Iterator<&'a T> for Items<'a, T> { +impl<'a, T> Iterator<&'a T> for Iter<'a, T> { fn next(&mut self) -> Option<&'a T> { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl<'a, T> DoubleEndedIterator<&'a T> for Items<'a, T> { +impl<'a, T> DoubleEndedIterator<&'a T> for Iter<'a, T> { fn next_back(&mut self) -> Option<&'a T> { self.iter.next_back() } } -impl<'a, T> ExactSizeIterator<&'a T> for Items<'a, T> {} +impl<'a, T> ExactSizeIterator<&'a T> for Iter<'a, T> {} -impl Iterator for MoveItems { +impl Iterator for IntoIter { fn next(&mut self) -> Option { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl DoubleEndedIterator for MoveItems { +impl DoubleEndedIterator for IntoIter { fn next_back(&mut self) -> Option { self.iter.next_back() } } -impl ExactSizeIterator for MoveItems {} +impl ExactSizeIterator for IntoIter {} /// Compare `x` and `y`, but return `short` if x is None and `long` if y is None fn cmp_opt(x: Option<&T>, y: Option<&T>, @@ -669,7 +574,7 @@ fn cmp_opt(x: Option<&T>, y: Option<&T>, } } -impl<'a, T: Ord> Iterator<&'a T> for DifferenceItems<'a, T> { +impl<'a, T: Ord> Iterator<&'a T> for Difference<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { match cmp_opt(self.a.peek(), self.b.peek(), Less, Less) { @@ -681,7 +586,7 @@ impl<'a, T: Ord> Iterator<&'a T> for DifferenceItems<'a, T> { } } -impl<'a, T: Ord> Iterator<&'a T> for SymDifferenceItems<'a, T> { +impl<'a, T: Ord> Iterator<&'a T> for SymmetricDifference<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) { @@ -693,7 +598,7 @@ impl<'a, T: Ord> Iterator<&'a T> for SymDifferenceItems<'a, T> { } } -impl<'a, T: Ord> Iterator<&'a T> for IntersectionItems<'a, T> { +impl<'a, T: Ord> Iterator<&'a T> for Intersection<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { let o_cmp = match (self.a.peek(), self.b.peek()) { @@ -711,7 +616,7 @@ impl<'a, T: Ord> Iterator<&'a T> for IntersectionItems<'a, T> { } } -impl<'a, T: Ord> Iterator<&'a T> for UnionItems<'a, T> { +impl<'a, T: Ord> Iterator<&'a T> for Union<'a, T> { fn next(&mut self) -> Option<&'a T> { loop { match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) { @@ -726,7 +631,7 @@ impl<'a, T: Ord> Iterator<&'a T> for UnionItems<'a, T> { #[cfg(test)] mod test { - use std::prelude::*; + use prelude::*; use super::BTreeSet; use std::hash; diff --git a/src/libcollections/dlist.rs b/src/libcollections/dlist.rs index e7454aef51e8e..de2a730744012 100644 --- a/src/libcollections/dlist.rs +++ b/src/libcollections/dlist.rs @@ -51,21 +51,21 @@ struct Node { } /// An iterator over references to the items of a `DList`. -pub struct Items<'a, T:'a> { +pub struct Iter<'a, T:'a> { head: &'a Link, tail: Rawlink>, nelem: uint, } // FIXME #11820: the &'a Option<> of the Link stops clone working. -impl<'a, T> Clone for Items<'a, T> { - fn clone(&self) -> Items<'a, T> { *self } +impl<'a, T> Clone for Iter<'a, T> { + fn clone(&self) -> Iter<'a, T> { *self } } -impl<'a,T> Copy for Items<'a,T> {} +impl<'a,T> Copy for Iter<'a,T> {} /// An iterator over mutable references to the items of a `DList`. -pub struct MutItems<'a, T:'a> { +pub struct IterMut<'a, T:'a> { list: &'a mut DList, head: Rawlink>, tail: Rawlink>, @@ -74,7 +74,7 @@ pub struct MutItems<'a, T:'a> { /// An iterator over mutable references to the items of a `DList`. 
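The renamed `Difference`, `SymmetricDifference`, `Intersection`, and `Union` iterators above never build a temporary set: each one walks two already-sorted key streams through `Peekable`, compares the heads, and decides which side to advance and whether to yield. A standalone sketch of that merge pattern, written as a free function over plain sorted iterators rather than the patch's lazy iterator types:

```rust
use std::cmp::Ordering;
use std::iter::Peekable;

/// Yields elements present in `a` but not in `b`, assuming both inputs are
/// sorted ascending -- the same walk the lazy `Difference` iterator performs.
fn sorted_difference<I>(mut a: Peekable<I>, mut b: Peekable<I>) -> Vec<i32>
where
    I: Iterator<Item = i32>,
{
    let mut out = Vec::new();
    loop {
        match (a.peek().copied(), b.peek().copied()) {
            (None, _) => return out,
            (Some(x), None) => {
                out.push(x);
                a.next();
            }
            (Some(x), Some(y)) => match x.cmp(&y) {
                Ordering::Less => {
                    out.push(x); // only in `a`
                    a.next();
                }
                Ordering::Equal => {
                    a.next(); // in both sides: skip on both
                    b.next();
                }
                Ordering::Greater => {
                    b.next(); // only in `b`: irrelevant for the difference
                }
            },
        }
    }
}

fn main() {
    let a = vec![1, 2, 3, 5].into_iter().peekable();
    let b = vec![2, 3, 4].into_iter().peekable();
    assert_eq!(sorted_difference(a, b), [1, 5]);
}
```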
#[deriving(Clone)] -pub struct MoveItems { +pub struct IntoIter { list: DList } @@ -394,19 +394,19 @@ impl DList { /// Provides a forward iterator. #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter<'a>(&'a self) -> Items<'a, T> { - Items{nelem: self.len(), head: &self.list_head, tail: self.list_tail} + pub fn iter<'a>(&'a self) -> Iter<'a, T> { + Iter{nelem: self.len(), head: &self.list_head, tail: self.list_tail} } /// Provides a forward iterator with mutable references. #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter_mut<'a>(&'a mut self) -> MutItems<'a, T> { + pub fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { let head_raw = match self.list_head { Some(ref mut h) => Rawlink::some(&mut **h), None => Rawlink::none(), }; - MutItems{ + IterMut{ nelem: self.len(), head: head_raw, tail: self.list_tail, @@ -417,8 +417,8 @@ impl DList { /// Consumes the list into an iterator yielding elements by value. #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> MoveItems { - MoveItems{list: self} + pub fn into_iter(self) -> IntoIter { + IntoIter{list: self} } /// Returns `true` if the `DList` is empty. @@ -451,7 +451,7 @@ impl DList { /// Provides a reference to the front element, or `None` if the list is /// empty. #[inline] - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn front(&self) -> Option<&T> { self.list_head.as_ref().map(|head| &head.value) } @@ -459,7 +459,7 @@ impl DList { /// Provides a mutable reference to the front element, or `None` if the list /// is empty. #[inline] - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn front_mut(&mut self) -> Option<&mut T> { self.list_head.as_mut().map(|head| &mut head.value) } @@ -467,7 +467,7 @@ impl DList { /// Provides a reference to the back element, or `None` if the list is /// empty. #[inline] - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn back(&self) -> Option<&T> { self.list_tail.resolve_immut().as_ref().map(|tail| &tail.value) } @@ -475,7 +475,7 @@ impl DList { /// Provides a mutable reference to the back element, or `None` if the list /// is empty. 
#[inline] - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn back_mut(&mut self) -> Option<&mut T> { self.list_tail.resolve().map(|tail| &mut tail.value) } @@ -579,7 +579,7 @@ impl Drop for DList { } -impl<'a, A> Iterator<&'a A> for Items<'a, A> { +impl<'a, A> Iterator<&'a A> for Iter<'a, A> { #[inline] fn next(&mut self) -> Option<&'a A> { if self.nelem == 0 { @@ -598,7 +598,7 @@ impl<'a, A> Iterator<&'a A> for Items<'a, A> { } } -impl<'a, A> DoubleEndedIterator<&'a A> for Items<'a, A> { +impl<'a, A> DoubleEndedIterator<&'a A> for Iter<'a, A> { #[inline] fn next_back(&mut self) -> Option<&'a A> { if self.nelem == 0 { @@ -612,9 +612,9 @@ impl<'a, A> DoubleEndedIterator<&'a A> for Items<'a, A> { } } -impl<'a, A> ExactSizeIterator<&'a A> for Items<'a, A> {} +impl<'a, A> ExactSizeIterator<&'a A> for Iter<'a, A> {} -impl<'a, A> Iterator<&'a mut A> for MutItems<'a, A> { +impl<'a, A> Iterator<&'a mut A> for IterMut<'a, A> { #[inline] fn next(&mut self) -> Option<&'a mut A> { if self.nelem == 0 { @@ -636,7 +636,7 @@ impl<'a, A> Iterator<&'a mut A> for MutItems<'a, A> { } } -impl<'a, A> DoubleEndedIterator<&'a mut A> for MutItems<'a, A> { +impl<'a, A> DoubleEndedIterator<&'a mut A> for IterMut<'a, A> { #[inline] fn next_back(&mut self) -> Option<&'a mut A> { if self.nelem == 0 { @@ -650,7 +650,7 @@ impl<'a, A> DoubleEndedIterator<&'a mut A> for MutItems<'a, A> { } } -impl<'a, A> ExactSizeIterator<&'a mut A> for MutItems<'a, A> {} +impl<'a, A> ExactSizeIterator<&'a mut A> for IterMut<'a, A> {} /// Allows mutating a `DList` while iterating. pub trait ListInsertion { @@ -664,8 +664,8 @@ pub trait ListInsertion { fn peek_next<'a>(&'a mut self) -> Option<&'a mut A>; } -// private methods for MutItems -impl<'a, A> MutItems<'a, A> { +// private methods for IterMut +impl<'a, A> IterMut<'a, A> { fn insert_next_node(&mut self, mut ins_node: Box>) { // Insert before `self.head` so that it is between the // previously yielded element and self.head. @@ -687,7 +687,7 @@ impl<'a, A> MutItems<'a, A> { } } -impl<'a, A> ListInsertion for MutItems<'a, A> { +impl<'a, A> ListInsertion for IterMut<'a, A> { #[inline] fn insert_next(&mut self, elt: A) { self.insert_next_node(box Node::new(elt)) @@ -702,7 +702,7 @@ impl<'a, A> ListInsertion for MutItems<'a, A> { } } -impl Iterator for MoveItems { +impl Iterator for IntoIter { #[inline] fn next(&mut self) -> Option { self.list.pop_front() } @@ -712,7 +712,7 @@ impl Iterator for MoveItems { } } -impl DoubleEndedIterator for MoveItems { +impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { self.list.pop_back() } } @@ -758,6 +758,7 @@ impl Ord for DList { } } +#[stable] impl Clone for DList { fn clone(&self) -> DList { self.iter().map(|x| x.clone()).collect() @@ -788,14 +789,14 @@ impl> Hash for DList { #[cfg(test)] mod tests { - use std::prelude::*; + use prelude::*; use std::rand; use std::hash; + use std::task::spawn; use test::Bencher; use test; use super::{DList, Node, ListInsertion}; - use vec::Vec; pub fn check_links(list: &DList) { let mut len = 0u; diff --git a/src/libcollections/enum_set.rs b/src/libcollections/enum_set.rs index caa2051c3f9ca..fd04ce94247b7 100644 --- a/src/libcollections/enum_set.rs +++ b/src/libcollections/enum_set.rs @@ -178,65 +178,29 @@ impl EnumSet { /// Returns an iterator over an `EnumSet`. 
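`EnumSet` packs its members into a single machine word: each `CLike` variant maps to a bit position, the operators are plain bitwise ops, and the iterator walks the set bits from the low end. A reduced sketch of the same bitset idea over a small enum (the `Color` enum and `bit`/`members` helpers are illustrative, not part of the patch):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Color { Red, Green, Blue }

fn bit(c: Color) -> u32 {
    1 << c as u32
}

fn contains(bits: u32, c: Color) -> bool {
    bits & bit(c) != 0
}

/// Walk the set bits from the low end, the way the renamed `Iter` does.
fn members(mut bits: u32) -> Vec<Color> {
    let all = [Color::Red, Color::Green, Color::Blue];
    let mut out = Vec::new();
    let mut index = 0;
    while bits != 0 {
        if bits & 1 != 0 {
            out.push(all[index]);
        }
        bits >>= 1;
        index += 1;
    }
    out
}

fn main() {
    let set = bit(Color::Red) | bit(Color::Blue); // union is just `|`
    assert!(contains(set, Color::Blue));
    assert!(!contains(set, Color::Green));
    assert_eq!(members(set), [Color::Red, Color::Blue]);
}
```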
#[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter(&self) -> Items { - Items::new(self.bits) + pub fn iter(&self) -> Iter { + Iter::new(self.bits) } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Sub, EnumSet> for EnumSet { - fn sub(&self, e: &EnumSet) -> EnumSet { - EnumSet {bits: self.bits & !e.bits} - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Sub, EnumSet> for EnumSet { fn sub(self, e: EnumSet) -> EnumSet { EnumSet {bits: self.bits & !e.bits} } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl BitOr, EnumSet> for EnumSet { - fn bitor(&self, e: &EnumSet) -> EnumSet { - EnumSet {bits: self.bits | e.bits} - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl BitOr, EnumSet> for EnumSet { fn bitor(self, e: EnumSet) -> EnumSet { EnumSet {bits: self.bits | e.bits} } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl BitAnd, EnumSet> for EnumSet { - fn bitand(&self, e: &EnumSet) -> EnumSet { - EnumSet {bits: self.bits & e.bits} - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl BitAnd, EnumSet> for EnumSet { fn bitand(self, e: EnumSet) -> EnumSet { EnumSet {bits: self.bits & e.bits} } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl BitXor, EnumSet> for EnumSet { - fn bitxor(&self, e: &EnumSet) -> EnumSet { - EnumSet {bits: self.bits ^ e.bits} - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl BitXor, EnumSet> for EnumSet { fn bitxor(self, e: EnumSet) -> EnumSet { EnumSet {bits: self.bits ^ e.bits} @@ -244,18 +208,18 @@ impl BitXor, EnumSet> for EnumSet { } /// An iterator over an EnumSet -pub struct Items { +pub struct Iter { index: uint, bits: uint, } -impl Items { - fn new(bits: uint) -> Items { - Items { index: 0, bits: bits } +impl Iter { + fn new(bits: uint) -> Iter { + Iter { index: 0, bits: bits } } } -impl Iterator for Items { +impl Iterator for Iter { fn next(&mut self) -> Option { if self.bits == 0 { return None; @@ -295,9 +259,9 @@ impl Extend for EnumSet { #[cfg(test)] mod test { - use std::prelude::*; use self::Foo::*; - use std::mem; + use prelude::*; + use core::mem; use super::{EnumSet, CLike}; diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs index a8eb10e51635f..363d30abd0347 100644 --- a/src/libcollections/lib.rs +++ b/src/libcollections/lib.rs @@ -23,7 +23,7 @@ #![allow(unknown_features)] #![feature(macro_rules, default_type_params, phase, globs)] -#![feature(unsafe_destructor, import_shadowing, slicing_syntax)] +#![feature(unsafe_destructor, slicing_syntax)] #![feature(unboxed_closures)] #![no_std] @@ -95,3 +95,41 @@ mod std { pub use core::kinds; // deriving(Copy) pub use core::hash; // deriving(Hash) } + +#[cfg(test)] +mod prelude { + // from core. 
+ pub use core::borrow::IntoCow; + pub use core::char::Char; + pub use core::clone::Clone; + pub use core::cmp::{PartialEq, Eq, Equiv, PartialOrd, Ord}; + pub use core::cmp::Ordering::{Less, Equal, Greater}; + pub use core::iter::range; + pub use core::iter::{FromIterator, Extend, IteratorExt}; + pub use core::iter::{Iterator, DoubleEndedIterator, RandomAccessIterator}; + pub use core::iter::{IteratorCloneExt, CloneIteratorExt, DoubleEndedIteratorExt}; + pub use core::iter::{IteratorOrdExt, MutableDoubleEndedIterator, ExactSizeIterator}; + pub use core::kinds::{Copy, Send, Sized, Sync}; + pub use core::mem::drop; + pub use core::ops::{Drop, Fn, FnMut, FnOnce}; + pub use core::option::Option; + pub use core::option::Option::{Some, None}; + pub use core::ptr::RawPtr; + pub use core::result::Result; + pub use core::result::Result::{Ok, Err}; + + // in core and collections (may differ). + pub use slice::{PartialEqSliceExt, OrdSliceExt}; + pub use slice::{AsSlice, SliceExt}; + pub use str::{from_str, Str}; + + // from other crates. + pub use alloc::boxed::Box; + pub use unicode::char::UnicodeChar; + + // from collections. + pub use slice::{CloneSliceExt, VectorVector}; + pub use str::{IntoMaybeOwned, StrVector}; + pub use string::{String, ToString}; + pub use vec::Vec; +} diff --git a/src/libcollections/ring_buf.rs b/src/libcollections/ring_buf.rs index cdb92d302e9d3..d4bd9b5228a8a 100644 --- a/src/libcollections/ring_buf.rs +++ b/src/libcollections/ring_buf.rs @@ -48,6 +48,7 @@ pub struct RingBuf { ptr: *mut T } +#[stable] impl Clone for RingBuf { fn clone(&self) -> RingBuf { self.iter().map(|t| t.clone()).collect() @@ -78,7 +79,13 @@ impl Default for RingBuf { impl RingBuf { /// Turn ptr into a slice #[inline] - unsafe fn buffer_as_slice(&self) -> &[T] { + unsafe fn buffer_as_slice<'a>(&'a self) -> &'a [T] { + mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap }) + } + + /// Turn ptr into a mut slice + #[inline] + unsafe fn buffer_as_mut_slice<'a>(&'a mut self) -> &'a mut [T] { mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap }) } @@ -221,6 +228,7 @@ impl RingBuf { /// assert_eq!(buf[0], 5); /// assert_eq!(buf[2], 3); /// ``` + #[stable] pub fn swap(&mut self, i: uint, j: uint) { assert!(i < self.len()); assert!(j < self.len()); @@ -369,8 +377,8 @@ impl RingBuf { /// assert_eq!(buf.iter().collect::>().as_slice(), b); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter(&self) -> Items { - Items { + pub fn iter(&self) -> Iter { + Iter { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_slice() } @@ -395,8 +403,8 @@ impl RingBuf { /// assert_eq!(buf.iter_mut().collect::>()[], b); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter_mut<'a>(&'a mut self) -> MutItems<'a, T> { - MutItems { + pub fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { + IterMut { tail: self.tail, head: self.head, cap: self.cap, @@ -407,12 +415,54 @@ impl RingBuf { /// Consumes the list into an iterator yielding elements by value. #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> MoveItems { - MoveItems { + pub fn into_iter(self) -> IntoIter { + IntoIter { inner: self, } } + /// Returns a pair of slices which contain, in order, the contents of the + /// `RingBuf`. 
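The new `as_slices` above exposes the ring buffer's storage as two ordered slices: when `tail <= head` (the `is_contiguous` case) the live elements are one contiguous run, otherwise they wrap past the end of the allocation and come back as a (tail-to-end, start-to-head) pair. On current Rust the same shape is available from `VecDeque::as_slices`; a small demonstration, noting that the exact split between the two slices is an internal layout detail:

```rust
use std::collections::VecDeque;

fn main() {
    // Fill from the back, then push to the front as well, so the contents
    // typically wrap around the end of the internal buffer.
    let mut buf: VecDeque<i32> = VecDeque::with_capacity(4);
    buf.push_back(1);
    buf.push_back(2);
    buf.push_back(3);
    buf.push_front(0);

    let (front, back) = buf.as_slices();
    println!("front = {:?}, back = {:?}", front, back);

    // However the storage is split, `front` followed by `back` is always
    // the logical front-to-back contents.
    let joined: Vec<i32> = front.iter().chain(back).copied().collect();
    assert_eq!(joined, [0, 1, 2, 3]);
}
```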
+ #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn as_slices<'a>(&'a self) -> (&'a [T], &'a [T]) { + unsafe { + let contiguous = self.is_contiguous(); + let buf = self.buffer_as_slice(); + if contiguous { + let (empty, buf) = buf.split_at(0); + (buf[self.tail..self.head], empty) + } else { + let (mid, right) = buf.split_at(self.tail); + let (left, _) = mid.split_at(self.head); + (right, left) + } + } + } + + /// Returns a pair of slices which contain, in order, the contents of the + /// `RingBuf`. + #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn as_mut_slices<'a>(&'a mut self) -> (&'a mut [T], &'a mut [T]) { + unsafe { + let contiguous = self.is_contiguous(); + let head = self.head; + let tail = self.tail; + let buf = self.buffer_as_mut_slice(); + + if contiguous { + let (empty, buf) = buf.split_at_mut(0); + (buf[mut tail..head], empty) + } else { + let (mid, right) = buf.split_at_mut(tail); + let (left, _) = mid.split_at_mut(head); + + (right, left) + } + } + } + /// Returns the number of elements in the `RingBuf`. /// /// # Examples @@ -443,6 +493,27 @@ impl RingBuf { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_empty(&self) -> bool { self.len() == 0 } + /// Creates a draining iterator that clears the `RingBuf` and iterates over + /// the removed items from start to end. + /// + /// # Examples + /// + /// ``` + /// use std::collections::RingBuf; + /// + /// let mut v = RingBuf::new(); + /// v.push_back(1i); + /// assert_eq!(v.drain().next(), Some(1)); + /// assert!(v.is_empty()); + /// ``` + #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn drain<'a>(&'a mut self) -> Drain<'a, T> { + Drain { + inner: self, + } + } + /// Clears the buffer, removing all values. 
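The `drain` method added above returns an iterator that borrows the `RingBuf` mutably and hands elements out by value; its `Drop` impl (later in this patch) finishes emptying the buffer and resets `head`/`tail`, which is why `clear` can be reduced to a bare `self.drain();` call. A minimal sketch of that drain-with-drop pattern over a plain Vec-backed queue (illustrative types, not the patch's):

```rust
struct Queue {
    items: Vec<i32>,
}

struct Drain<'a> {
    queue: &'a mut Queue,
}

impl<'a> Iterator for Drain<'a> {
    type Item = i32;
    fn next(&mut self) -> Option<i32> {
        // Pop from the front; fine for a sketch, though O(n) per element on a Vec.
        if self.queue.items.is_empty() {
            None
        } else {
            Some(self.queue.items.remove(0))
        }
    }
}

impl<'a> Drop for Drain<'a> {
    fn drop(&mut self) {
        // Whatever the caller did not consume is removed here, so the queue
        // is guaranteed to be empty once the iterator goes away.
        self.queue.items.clear();
    }
}

impl Queue {
    fn drain(&mut self) -> Drain<'_> {
        Drain { queue: self }
    }

    fn clear(&mut self) {
        // Same trick as the patch: create the draining iterator and let its
        // Drop impl do the work.
        self.drain();
    }
}

fn main() {
    let mut q = Queue { items: vec![1, 2, 3] };
    assert_eq!(q.drain().next(), Some(1));
    assert!(q.items.is_empty()); // Drop consumed the rest

    q.items.extend([4, 5]);
    q.clear();
    assert!(q.items.is_empty());
}
```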
/// /// # Examples @@ -456,10 +527,9 @@ impl RingBuf { /// assert!(v.is_empty()); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[inline] pub fn clear(&mut self) { - while self.pop_front().is_some() {} - self.head = 0; - self.tail = 0; + self.drain(); } /// Provides a reference to the front element, or `None` if the sequence is @@ -477,7 +547,7 @@ impl RingBuf { /// d.push_back(2i); /// assert_eq!(d.front(), Some(&1i)); /// ``` - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn front(&self) -> Option<&T> { if !self.is_empty() { Some(&self[0]) } else { None } } @@ -501,7 +571,7 @@ impl RingBuf { /// } /// assert_eq!(d.front(), Some(&9i)); /// ``` - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn front_mut(&mut self) -> Option<&mut T> { if !self.is_empty() { Some(&mut self[0]) } else { None } } @@ -521,7 +591,7 @@ impl RingBuf { /// d.push_back(2i); /// assert_eq!(d.back(), Some(&2i)); /// ``` - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn back(&self) -> Option<&T> { if !self.is_empty() { Some(&self[self.len() - 1]) } else { None } } @@ -545,7 +615,7 @@ impl RingBuf { /// } /// assert_eq!(d.back(), Some(&9i)); /// ``` - #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[stable] pub fn back_mut(&mut self) -> Option<&mut T> { let len = self.len(); if !self.is_empty() { Some(&mut self[len - 1]) } else { None } @@ -663,6 +733,11 @@ impl RingBuf { } } + #[inline] + fn is_contiguous(&self) -> bool { + self.tail <= self.head + } + /// Inserts an element at position `i` within the ringbuf. Whichever /// end is closer to the insertion point will be moved to make room, /// and all the affected elements will be moved to new positions. @@ -715,7 +790,7 @@ impl RingBuf { let distance_to_tail = i; let distance_to_head = self.len() - i; - let contiguous = self.tail <= self.head; + let contiguous = self.is_contiguous(); match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) { (true, true, _) if i == 0 => { @@ -1048,13 +1123,13 @@ fn count(tail: uint, head: uint, size: uint) -> uint { } /// `RingBuf` iterator. -pub struct Items<'a, T:'a> { +pub struct Iter<'a, T:'a> { ring: &'a [T], tail: uint, head: uint } -impl<'a, T> Iterator<&'a T> for Items<'a, T> { +impl<'a, T> Iterator<&'a T> for Iter<'a, T> { #[inline] fn next(&mut self) -> Option<&'a T> { if self.tail == self.head { @@ -1072,7 +1147,7 @@ impl<'a, T> Iterator<&'a T> for Items<'a, T> { } } -impl<'a, T> DoubleEndedIterator<&'a T> for Items<'a, T> { +impl<'a, T> DoubleEndedIterator<&'a T> for Iter<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a T> { if self.tail == self.head { @@ -1083,9 +1158,9 @@ impl<'a, T> DoubleEndedIterator<&'a T> for Items<'a, T> { } } -impl<'a, T> ExactSizeIterator<&'a T> for Items<'a, T> {} +impl<'a, T> ExactSizeIterator<&'a T> for Iter<'a, T> {} -impl<'a, T> RandomAccessIterator<&'a T> for Items<'a, T> { +impl<'a, T> RandomAccessIterator<&'a T> for Iter<'a, T> { #[inline] fn indexable(&self) -> uint { let (len, _) = self.size_hint(); @@ -1103,11 +1178,11 @@ impl<'a, T> RandomAccessIterator<&'a T> for Items<'a, T> { } } -// FIXME This was implemented differently from Items because of a problem +// FIXME This was implemented differently from Iter because of a problem // with returning the mutable reference. 
I couldn't find a way to // make the lifetime checker happy so, but there should be a way. /// `RingBuf` mutable iterator. -pub struct MutItems<'a, T:'a> { +pub struct IterMut<'a, T:'a> { ptr: *mut T, tail: uint, head: uint, @@ -1115,7 +1190,7 @@ pub struct MutItems<'a, T:'a> { marker: marker::ContravariantLifetime<'a>, } -impl<'a, T> Iterator<&'a mut T> for MutItems<'a, T> { +impl<'a, T> Iterator<&'a mut T> for IterMut<'a, T> { #[inline] fn next(&mut self) -> Option<&'a mut T> { if self.tail == self.head { @@ -1136,7 +1211,7 @@ impl<'a, T> Iterator<&'a mut T> for MutItems<'a, T> { } } -impl<'a, T> DoubleEndedIterator<&'a mut T> for MutItems<'a, T> { +impl<'a, T> DoubleEndedIterator<&'a mut T> for IterMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut T> { if self.tail == self.head { @@ -1150,14 +1225,14 @@ impl<'a, T> DoubleEndedIterator<&'a mut T> for MutItems<'a, T> { } } -impl<'a, T> ExactSizeIterator<&'a mut T> for MutItems<'a, T> {} +impl<'a, T> ExactSizeIterator<&'a mut T> for IterMut<'a, T> {} // A by-value RingBuf iterator -pub struct MoveItems { +pub struct IntoIter { inner: RingBuf, } -impl Iterator for MoveItems { +impl Iterator for IntoIter { #[inline] fn next(&mut self) -> Option { self.inner.pop_front() @@ -1170,15 +1245,50 @@ impl Iterator for MoveItems { } } -impl DoubleEndedIterator for MoveItems { +impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { self.inner.pop_back() } } +impl ExactSizeIterator for IntoIter {} + +/// A draining RingBuf iterator +pub struct Drain<'a, T: 'a> { + inner: &'a mut RingBuf, +} + +#[unsafe_destructor] +impl<'a, T: 'a> Drop for Drain<'a, T> { + fn drop(&mut self) { + for _ in *self {} + self.inner.head = 0; + self.inner.tail = 0; + } +} + +impl<'a, T: 'a> Iterator for Drain<'a, T> { + #[inline] + fn next(&mut self) -> Option { + self.inner.pop_front() + } -impl ExactSizeIterator for MoveItems {} + #[inline] + fn size_hint(&self) -> (uint, Option) { + let len = self.inner.len(); + (len, Some(len)) + } +} + +impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { + #[inline] + fn next_back(&mut self) -> Option { + self.inner.pop_back() + } +} + +impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {} impl PartialEq for RingBuf { fn eq(&self, other: &RingBuf) -> bool { @@ -1260,18 +1370,17 @@ impl fmt::Show for RingBuf { #[cfg(test)] mod tests { - use core::iter; use self::Taggy::*; use self::Taggypar::*; - use std::cmp; + use prelude::*; + use core::cmp; + use core::iter; use std::fmt::Show; - use std::prelude::*; use std::hash; use test::Bencher; use test; use super::RingBuf; - use vec::Vec; #[test] #[allow(deprecated)] @@ -1789,9 +1898,76 @@ mod tests { } } + #[test] + fn test_drain() { + + // Empty iter + { + let mut d: RingBuf = RingBuf::new(); + + { + let mut iter = d.drain(); + + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + assert_eq!(iter.size_hint(), (0, Some(0))); + } + + assert!(d.is_empty()); + } + + // simple iter + { + let mut d = RingBuf::new(); + for i in range(0i, 5) { + d.push_back(i); + } + + assert_eq!(d.drain().collect::>(), [0, 1, 2, 3, 4]); + assert!(d.is_empty()); + } + + // wrapped iter + { + let mut d = RingBuf::new(); + for i in range(0i, 5) { + d.push_back(i); + } + for i in range(6, 9) { + d.push_front(i); + } + + assert_eq!(d.drain().collect::>(), [8,7,6,0,1,2,3,4]); + assert!(d.is_empty()); + } + + // partially used + { + let mut d = RingBuf::new(); + for i in range(0i, 5) { + d.push_back(i); + } + for i in range(6, 9) { + 
d.push_front(i); + } + + { + let mut it = d.drain(); + assert_eq!(it.size_hint(), (8, Some(8))); + assert_eq!(it.next(), Some(8)); + assert_eq!(it.size_hint(), (7, Some(7))); + assert_eq!(it.next_back(), Some(4)); + assert_eq!(it.size_hint(), (6, Some(6))); + assert_eq!(it.next(), Some(7)); + assert_eq!(it.size_hint(), (5, Some(5))); + } + assert!(d.is_empty()); + } + } + #[test] fn test_from_iter() { - use std::iter; + use core::iter; let v = vec!(1i,2,3,4,5,6,7); let deq: RingBuf = v.iter().map(|&x| x).collect(); let u: Vec = deq.iter().map(|&x| x).collect(); @@ -2132,4 +2308,60 @@ mod tests { ring.pop_front(); assert_eq!(ring.front(), None); } + + #[test] + fn test_as_slices() { + let mut ring: RingBuf = RingBuf::with_capacity(127); + let cap = ring.capacity() as int; + let first = cap/2; + let last = cap - first; + for i in range(0, first) { + ring.push_back(i); + + let (left, right) = ring.as_slices(); + let expected: Vec<_> = range(0, i+1).collect(); + assert_eq!(left, expected); + assert_eq!(right, []); + } + + for j in range(-last, 0) { + ring.push_front(j); + let (left, right) = ring.as_slices(); + let expected_left: Vec<_> = range(-last, j+1).rev().collect(); + let expected_right: Vec<_> = range(0, first).collect(); + assert_eq!(left, expected_left); + assert_eq!(right, expected_right); + } + + assert_eq!(ring.len() as int, cap); + assert_eq!(ring.capacity() as int, cap); + } + + #[test] + fn test_as_mut_slices() { + let mut ring: RingBuf = RingBuf::with_capacity(127); + let cap = ring.capacity() as int; + let first = cap/2; + let last = cap - first; + for i in range(0, first) { + ring.push_back(i); + + let (left, right) = ring.as_mut_slices(); + let expected: Vec<_> = range(0, i+1).collect(); + assert_eq!(left, expected); + assert_eq!(right, []); + } + + for j in range(-last, 0) { + ring.push_front(j); + let (left, right) = ring.as_mut_slices(); + let expected_left: Vec<_> = range(-last, j+1).rev().collect(); + let expected_right: Vec<_> = range(0, first).collect(); + assert_eq!(left, expected_left); + assert_eq!(right, expected_right); + } + + assert_eq!(ring.len() as int, cap); + assert_eq!(ring.capacity() as int, cap); + } } diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 3bf10192e5963..d6d94f57acf45 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -37,7 +37,7 @@ //! //! ## Structs //! -//! There are several structs that are useful for slices, such as `Items`, which +//! There are several structs that are useful for slices, such as `Iter`, which //! represents iteration over a slice. //! //! ## Traits @@ -104,7 +104,7 @@ use self::Direction::*; use vec::Vec; pub use core::slice::{Chunks, AsSlice, SplitsN, Windows}; -pub use core::slice::{Items, MutItems, PartialEqSliceExt}; +pub use core::slice::{Iter, IterMut, PartialEqSliceExt}; pub use core::slice::{ImmutableIntSlice, MutableIntSlice}; pub use core::slice::{MutSplits, MutChunks, Splits}; pub use core::slice::{bytes, mut_ref_slice, ref_slice}; @@ -771,7 +771,7 @@ pub trait SliceExt for Sized? { /// Returns an iterator over the slice #[unstable = "iterator type may change"] - fn iter(&self) -> Items; + fn iter(&self) -> Iter; /// Returns an iterator over subslices separated by elements that match /// `pred`. The matched element is not contained in the subslices. @@ -970,7 +970,7 @@ pub trait SliceExt for Sized? 
{ /// Returns an iterator that allows modifying each value #[unstable = "waiting on iterator type name conventions"] - fn iter_mut(&mut self) -> MutItems; + fn iter_mut(&mut self) -> IterMut; /// Returns a mutable pointer to the first element of a slice, or `None` if it is empty #[unstable = "name may change"] @@ -1137,7 +1137,7 @@ impl SliceExt for [T] { } #[inline] - fn iter<'a>(&'a self) -> Items<'a, T> { + fn iter<'a>(&'a self) -> Iter<'a, T> { core_slice::SliceExt::iter(self) } @@ -1246,7 +1246,7 @@ impl SliceExt for [T] { } #[inline] - fn iter_mut<'a>(&'a mut self) -> MutItems<'a, T> { + fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { core_slice::SliceExt::iter_mut(self) } @@ -1343,16 +1343,13 @@ pub mod raw { #[cfg(test)] mod tests { use std::boxed::Box; - use std::cell::Cell; - use std::default::Default; - use std::mem; - use std::prelude::*; + use prelude::*; + use core::cell::Cell; + use core::default::Default; + use core::mem; use std::rand::{Rng, task_rng}; use std::rc::Rc; - use std::rt; - use slice::*; - - use vec::Vec; + use super::ElementSwaps; fn square(n: uint) -> uint { n * n } @@ -2764,14 +2761,12 @@ mod tests { #[cfg(test)] mod bench { - use std::prelude::*; + use prelude::*; + use core::mem; + use core::ptr; use std::rand::{weak_rng, Rng}; - use std::mem; - use std::ptr; use test::{Bencher, black_box}; - use vec::Vec; - #[bench] fn iterator(b: &mut Bencher) { // peculiar numbers to stop LLVM from optimising the summation diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 9ac5f04efe5f2..5feae5e558edf 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -51,35 +51,41 @@ #![doc(primitive = "str")] -use core::prelude::*; - -pub use self::MaybeOwned::*; +use self::MaybeOwned::*; use self::RecompositionState::*; use self::DecompositionType::*; use core::borrow::{BorrowFrom, Cow, ToOwned}; +use core::char::Char; +use core::clone::Clone; +use core::cmp::{Equiv, PartialEq, Eq, PartialOrd, Ord, Ordering}; +use core::cmp; use core::default::Default; use core::fmt; use core::hash; -use core::cmp; use core::iter::AdditiveIterator; +use core::iter::{mod, range, Iterator, IteratorExt}; +use core::kinds::Sized; +use core::ops; +use core::option::Option::{mod, Some, None}; +use core::slice::AsSlice; +use core::str as core_str; +use unicode::str::{UnicodeStr, Utf16Encoder}; use ring_buf::RingBuf; +use slice::SliceExt; use string::String; use unicode; use vec::Vec; -pub use core::str::{from_utf8, CharEq, Chars, CharOffsets}; -pub use core::str::{Bytes, CharSplits}; -pub use core::str::{CharSplitsN, AnyLines, MatchIndices, StrSplits}; -pub use core::str::{Utf16Encoder, Utf16CodeUnits}; -pub use core::str::{eq_slice, is_utf8, is_utf16, Utf16Items}; -pub use core::str::{Utf16Item, ScalarValue, LoneSurrogate, utf16_items}; -pub use core::str::{truncate_utf16_at_nul, utf8_char_width, CharRange}; -pub use core::str::{FromStr, from_str}; -pub use core::str::{Str, StrPrelude}; +pub use core::str::{from_utf8, CharEq, Chars, CharIndices}; +pub use core::str::{Bytes, CharSplits, is_utf8}; +pub use core::str::{CharSplitsN, Lines, LinesAny, MatchIndices, StrSplits}; +pub use core::str::{CharRange}; +pub use core::str::{FromStr, from_str, Utf8Error}; +pub use core::str::Str; pub use core::str::{from_utf8_unchecked, from_c_str}; -pub use unicode::str::{UnicodeStrPrelude, Words, Graphemes, GraphemeIndices}; +pub use unicode::str::{Words, Graphemes, GraphemeIndices}; // FIXME(conventions): ensure bit/char conventions are followed by str's API @@ -88,6 +94,7 
@@ Section: Creating a string */ /// Methods for vectors of strings. +#[unstable = "functionality may be replaced with iterators"] pub trait StrVector for Sized? { /// Concatenates a vector of strings. /// @@ -114,6 +121,7 @@ pub trait StrVector for Sized? { fn connect(&self, sep: &str) -> String; } +#[allow(deprecated)] impl StrVector for [S] { fn concat(&self) -> String { if self.is_empty() { @@ -126,7 +134,7 @@ impl StrVector for [S] { let mut result = String::with_capacity(len); for s in self.iter() { - result.push_str(s.as_slice()) + result.push_str(s.as_slice()); } result @@ -376,6 +384,21 @@ impl<'a> Iterator for Recompositions<'a> { } } +/// External iterator for a string's UTF16 codeunits. +/// Use with the `std::iter` module. +#[deriving(Clone)] +pub struct Utf16Units<'a> { + encoder: Utf16Encoder> +} + +impl<'a> Iterator for Utf16Units<'a> { + #[inline] + fn next(&mut self) -> Option { self.encoder.next() } + + #[inline] + fn size_hint(&self) -> (uint, Option) { self.encoder.size_hint() } +} + /// Replaces all occurrences of one string with another. /// /// # Arguments @@ -391,21 +414,15 @@ impl<'a> Iterator for Recompositions<'a> { /// # Examples /// /// ```rust +/// # #![allow(deprecated)] /// use std::str; /// let string = "orange"; /// let new_string = str::replace(string, "or", "str"); /// assert_eq!(new_string.as_slice(), "strange"); /// ``` +#[deprecated = "call the inherent method instead"] pub fn replace(s: &str, from: &str, to: &str) -> String { - let mut result = String::new(); - let mut last_end = 0; - for (start, end) in s.match_indices(from) { - result.push_str(unsafe { s.slice_unchecked(last_end, start) }); - result.push_str(to); - last_end = end; - } - result.push_str(unsafe { s.slice_unchecked(last_end, s.len()) }); - result + s.replace(from, to) } /* @@ -431,7 +448,7 @@ Section: MaybeOwned /// A string type that can hold either a `String` or a `&str`. /// This can be useful as an optimization when an allocation is sometimes /// needed but not always. -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] pub enum MaybeOwned<'a> { /// A borrowed string. Slice(&'a str), @@ -440,9 +457,10 @@ pub enum MaybeOwned<'a> { } /// A specialization of `CowString` to be sendable. +#[deprecated = "use std::string::CowString<'static>"] pub type SendStr = CowString<'static>; -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a> MaybeOwned<'a> { /// Returns `true` if this `MaybeOwned` wraps an owned string. /// @@ -480,6 +498,7 @@ impl<'a> MaybeOwned<'a> { /// Return the number of bytes in this string. 
#[inline] + #[allow(deprecated)] pub fn len(&self) -> uint { self.as_slice().len() } /// Returns true if the string contains no bytes @@ -542,7 +561,8 @@ impl<'a> IntoMaybeOwned<'a> for MaybeOwned<'a> { fn into_maybe_owned(self) -> MaybeOwned<'a> { self } } -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] +#[allow(deprecated)] impl<'a> PartialEq for MaybeOwned<'a> { #[inline] fn eq(&self, other: &MaybeOwned) -> bool { @@ -550,10 +570,10 @@ impl<'a> PartialEq for MaybeOwned<'a> { } } -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a> Eq for MaybeOwned<'a> {} -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a> PartialOrd for MaybeOwned<'a> { #[inline] fn partial_cmp(&self, other: &MaybeOwned) -> Option { @@ -561,16 +581,17 @@ impl<'a> PartialOrd for MaybeOwned<'a> { } } -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a> Ord for MaybeOwned<'a> { #[inline] + #[allow(deprecated)] fn cmp(&self, other: &MaybeOwned) -> Ordering { self.as_slice().cmp(other.as_slice()) } } #[allow(deprecated)] -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a, S: Str> Equiv for MaybeOwned<'a> { #[inline] fn equiv(&self, other: &S) -> bool { @@ -578,9 +599,9 @@ impl<'a, S: Str> Equiv for MaybeOwned<'a> { } } -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] +#[allow(deprecated)] impl<'a> Str for MaybeOwned<'a> { - #[allow(deprecated)] #[inline] fn as_slice<'b>(&'b self) -> &'b str { match *self { @@ -590,19 +611,7 @@ impl<'a> Str for MaybeOwned<'a> { } } -#[deprecated = "use std::str::CowString"] -impl<'a> StrAllocating for MaybeOwned<'a> { - #[allow(deprecated)] - #[inline] - fn into_string(self) -> String { - match self { - Slice(s) => String::from_str(s), - Owned(s) => s - } - } -} - -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a> Clone for MaybeOwned<'a> { #[allow(deprecated)] #[inline] @@ -614,14 +623,15 @@ impl<'a> Clone for MaybeOwned<'a> { } } -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a> Default for MaybeOwned<'a> { #[allow(deprecated)] #[inline] fn default() -> MaybeOwned<'a> { Slice("") } } -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] +#[allow(deprecated)] impl<'a, H: hash::Writer> hash::Hash for MaybeOwned<'a> { #[inline] fn hash(&self, hasher: &mut H) { @@ -629,7 +639,7 @@ impl<'a, H: hash::Writer> hash::Hash for MaybeOwned<'a> { } } -#[deprecated = "use std::str::CowString"] +#[deprecated = "use std::string::CowString"] impl<'a> fmt::Show for MaybeOwned<'a> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -647,10 +657,15 @@ impl BorrowFrom for str { #[unstable = "trait is unstable"] impl ToOwned for str { - fn to_owned(&self) -> String { self.into_string() } + fn to_owned(&self) -> String { + unsafe { + String::from_utf8_unchecked(self.as_bytes().to_owned()) + } + } } /// Unsafe string operations. 
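The new `ToOwned for str` body above copies the bytes and rebuilds the `String` with `from_utf8_unchecked`. That is sound only because the bytes come from an existing `&str`, which is already guaranteed to be valid UTF-8, so re-validating them would be wasted work. A small illustration of the invariant being relied on (the safe `to_string` path produces the identical result):

```rust
fn main() {
    let s: &str = "héllo"; // non-ASCII, so the UTF-8 invariant actually matters
    let bytes = s.as_bytes().to_owned();

    // Safety: `bytes` was copied out of a `&str`, so it is valid UTF-8 by
    // construction; skipping validation is all the unchecked call buys.
    let owned = unsafe { String::from_utf8_unchecked(bytes) };

    assert_eq!(owned, s.to_string());
}
```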
+#[deprecated] pub mod raw { pub use core::str::raw::{from_utf8, c_str_to_static_slice, slice_bytes}; pub use core::str::raw::{slice_unchecked}; @@ -661,46 +676,25 @@ Section: CowString */ /// A clone-on-write string +#[deprecated = "use std::string::CowString instead"] pub type CowString<'a> = Cow<'a, String, str>; -impl<'a> Str for CowString<'a> { - #[inline] - fn as_slice<'b>(&'b self) -> &'b str { - (**self).as_slice() - } -} - /* Section: Trait implementations */ /// Any string that can be represented as a slice. -pub trait StrAllocating: Str { - /// Converts `self` into a `String`, not making a copy if possible. - fn into_string(self) -> String; - +pub trait StrExt for Sized?: ops::Slice { /// Escapes each char in `s` with `char::escape_default`. + #[unstable = "return type may change to be an iterator"] fn escape_default(&self) -> String { - let me = self.as_slice(); - let mut out = String::with_capacity(me.len()); - for c in me.chars() { - for c in c.escape_default() { - out.push(c); - } - } - out + self.chars().flat_map(|c| c.escape_default()).collect() } /// Escapes each char in `s` with `char::escape_unicode`. + #[unstable = "return type may change to be an iterator"] fn escape_unicode(&self) -> String { - let me = self.as_slice(); - let mut out = String::with_capacity(me.len()); - for c in me.chars() { - for c in c.escape_unicode() { - out.push(c); - } - } - out + self.chars().flat_map(|c| c.escape_unicode()).collect() } /// Replaces all occurrences of one string with another. @@ -727,25 +721,31 @@ pub trait StrAllocating: Str { /// // not found, so no change. /// assert_eq!(s.replace("cookie monster", "little lamb"), s); /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] fn replace(&self, from: &str, to: &str) -> String { - replace(self.as_slice(), from, to) + let mut result = String::new(); + let mut last_end = 0; + for (start, end) in self.match_indices(from) { + result.push_str(unsafe { self.slice_unchecked(last_end, start) }); + result.push_str(to); + last_end = end; + } + result.push_str(unsafe { self.slice_unchecked(last_end, self.len()) }); + result } /// Given a string, makes a new string with repeated copies of it. + #[deprecated = "use repeat(self).take(n).collect() instead"] fn repeat(&self, nn: uint) -> String { - let me = self.as_slice(); - let mut ret = String::with_capacity(nn * me.len()); - for _ in range(0, nn) { - ret.push_str(me); - } - ret + iter::repeat(self[]).take(nn).collect() } /// Returns the Levenshtein Distance between two strings. + #[deprecated = "this function will be removed"] fn lev_distance(&self, t: &str) -> uint { - let me = self.as_slice(); - if me.is_empty() { return t.char_len(); } - if t.is_empty() { return me.char_len(); } + let me = self[]; + if me.is_empty() { return t.chars().count(); } + if t.is_empty() { return me.chars().count(); } let mut dcol = Vec::from_fn(t.len() + 1, |x| x); let mut t_last = 0; @@ -777,9 +777,10 @@ pub trait StrAllocating: Str { /// Returns an iterator over the string in Unicode Normalization Form D /// (canonical decomposition). #[inline] + #[unstable = "this functionality may be moved to libunicode"] fn nfd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self.as_slice().chars(), + iter: self[].chars(), buffer: Vec::new(), sorted: false, kind: Canonical @@ -789,9 +790,10 @@ pub trait StrAllocating: Str { /// Returns an iterator over the string in Unicode Normalization Form KD /// (compatibility decomposition). 
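The deprecated `lev_distance` above uses the usual single-row dynamic program for edit distance, keeping one column vector (`dcol`) alive instead of the full matrix. Since the hunk only shows the setup, here is a complete, hedged sketch of that algorithm in current Rust (written over `char`s; this is a reconstruction of the technique, not the patch's exact code):

```rust
/// Levenshtein edit distance via single-row dynamic programming: `row[j]`
/// holds the distance between the prefix of `a` consumed so far and the
/// first `j` characters of `b`.
fn lev_distance(a: &str, b: &str) -> usize {
    if a.is_empty() { return b.chars().count(); }
    if b.is_empty() { return a.chars().count(); }

    let b_chars: Vec<char> = b.chars().collect();
    let mut row: Vec<usize> = (0..=b_chars.len()).collect();

    for (i, ca) in a.chars().enumerate() {
        let mut prev_diag = row[0]; // value of the previous row at column 0
        row[0] = i + 1;
        for (j, &cb) in b_chars.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            let next = (prev_diag + cost)   // substitution (or match)
                .min(row[j] + 1)            // insertion
                .min(row[j + 1] + 1);       // deletion
            prev_diag = row[j + 1];
            row[j + 1] = next;
        }
    }
    row[b_chars.len()]
}

fn main() {
    assert_eq!(lev_distance("kitten", "sitting"), 3);
    assert_eq!(lev_distance("flaw", "lawn"), 2);
    assert_eq!(lev_distance("", "abc"), 3);
}
```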
#[inline] + #[unstable = "this functionality may be moved to libunicode"] fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self.as_slice().chars(), + iter: self[].chars(), buffer: Vec::new(), sorted: false, kind: Compatible @@ -801,6 +803,7 @@ pub trait StrAllocating: Str { /// An Iterator over the string in Unicode Normalization Form C /// (canonical decomposition followed by canonical composition). #[inline] + #[unstable = "this functionality may be moved to libunicode"] fn nfc_chars<'a>(&'a self) -> Recompositions<'a> { Recompositions { iter: self.nfd_chars(), @@ -814,6 +817,7 @@ pub trait StrAllocating: Str { /// An Iterator over the string in Unicode Normalization Form KC /// (compatibility decomposition followed by canonical composition). #[inline] + #[unstable = "this functionality may be moved to libunicode"] fn nfkc_chars<'a>(&'a self) -> Recompositions<'a> { Recompositions { iter: self.nfkd_chars(), @@ -823,15 +827,922 @@ pub trait StrAllocating: Str { last_ccc: None } } -} -impl<'a> StrAllocating for &'a str { + /// Returns true if one string contains another + /// + /// # Arguments + /// + /// - needle - The string to look for + /// + /// # Example + /// + /// ```rust + /// assert!("bananas".contains("nana")); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn contains(&self, needle: &str) -> bool { + core_str::StrExt::contains(self[], needle) + } + + /// Returns true if a string contains a char. + /// + /// # Arguments + /// + /// - needle - The char to look for + /// + /// # Example + /// + /// ```rust + /// assert!("hello".contains_char('e')); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn contains_char(&self, needle: char) -> bool { + core_str::StrExt::contains_char(self[], needle) + } + + /// An iterator over the characters of `self`. Note, this iterates + /// over Unicode code-points, not Unicode graphemes. + /// + /// # Example + /// + /// ```rust + /// let v: Vec = "abc åäö".chars().collect(); + /// assert_eq!(v, vec!['a', 'b', 'c', ' ', 'å', 'ä', 'ö']); + /// ``` + #[stable] + fn chars(&self) -> Chars { + core_str::StrExt::chars(self[]) + } + + /// An iterator over the bytes of `self` + /// + /// # Example + /// + /// ```rust + /// let v: Vec = "bors".bytes().collect(); + /// assert_eq!(v, b"bors".to_vec()); + /// ``` + #[stable] + fn bytes(&self) -> Bytes { + core_str::StrExt::bytes(self[]) + } + + /// An iterator over the characters of `self` and their byte offsets. + #[stable] + fn char_indices(&self) -> CharIndices { + core_str::StrExt::char_indices(self[]) + } + + /// An iterator over substrings of `self`, separated by characters + /// matched by `sep`. + /// + /// # Example + /// + /// ```rust + /// let v: Vec<&str> = "Mary had a little lamb".split(' ').collect(); + /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]); + /// + /// let v: Vec<&str> = "abc1def2ghi".split(|&: c: char| c.is_numeric()).collect(); + /// assert_eq!(v, vec!["abc", "def", "ghi"]); + /// + /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').collect(); + /// assert_eq!(v, vec!["lion", "", "tiger", "leopard"]); + /// + /// let v: Vec<&str> = "".split('X').collect(); + /// assert_eq!(v, vec![""]); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn split(&self, sep: Sep) -> CharSplits { + core_str::StrExt::split(self[], sep) + } + + /// An iterator over substrings of `self`, separated by characters + /// matched by `sep`, restricted to splitting at most `count` + /// times. 
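The normalization iterators in this hunk (`nfd_chars`, `nfkd_chars`, and the recomposing variants) only change how they borrow `self`, but a tiny usage sketch may help. It assumes the methods are reachable through the usual prelude re-exports, and uses the standard composed/decomposed `é` pair:

```rust
fn main() {
    // "é" spelled as 'e' followed by U+0301 (combining acute accent).
    let decomposed = "e\u{301}";
    // Canonical composition (NFC) folds the pair back into U+00E9.
    let composed: String = decomposed.nfc_chars().collect();
    assert_eq!(composed, "\u{e9}".to_string());
}
```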
+ /// + /// # Example + /// + /// ```rust + /// let v: Vec<&str> = "Mary had a little lambda".splitn(2, ' ').collect(); + /// assert_eq!(v, vec!["Mary", "had", "a little lambda"]); + /// + /// let v: Vec<&str> = "abc1def2ghi".splitn(1, |&: c: char| c.is_numeric()).collect(); + /// assert_eq!(v, vec!["abc", "def2ghi"]); + /// + /// let v: Vec<&str> = "lionXXtigerXleopard".splitn(2, 'X').collect(); + /// assert_eq!(v, vec!["lion", "", "tigerXleopard"]); + /// + /// let v: Vec<&str> = "abcXdef".splitn(0, 'X').collect(); + /// assert_eq!(v, vec!["abcXdef"]); + /// + /// let v: Vec<&str> = "".splitn(1, 'X').collect(); + /// assert_eq!(v, vec![""]); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn splitn(&self, count: uint, sep: Sep) -> CharSplitsN { + core_str::StrExt::splitn(self[], count, sep) + } + + /// An iterator over substrings of `self`, separated by characters + /// matched by `sep`. + /// + /// Equivalent to `split`, except that the trailing substring + /// is skipped if empty (terminator semantics). + /// + /// # Example + /// + /// ```rust + /// let v: Vec<&str> = "A.B.".split_terminator('.').collect(); + /// assert_eq!(v, vec!["A", "B"]); + /// + /// let v: Vec<&str> = "A..B..".split_terminator('.').collect(); + /// assert_eq!(v, vec!["A", "", "B", ""]); + /// + /// let v: Vec<&str> = "Mary had a little lamb".split(' ').rev().collect(); + /// assert_eq!(v, vec!["lamb", "little", "a", "had", "Mary"]); + /// + /// let v: Vec<&str> = "abc1def2ghi".split(|&: c: char| c.is_numeric()).rev().collect(); + /// assert_eq!(v, vec!["ghi", "def", "abc"]); + /// + /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').rev().collect(); + /// assert_eq!(v, vec!["leopard", "tiger", "", "lion"]); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn split_terminator(&self, sep: Sep) -> CharSplits { + core_str::StrExt::split_terminator(self[], sep) + } + + /// An iterator over substrings of `self`, separated by characters + /// matched by `sep`, starting from the end of the string. + /// Restricted to splitting at most `count` times. + /// + /// # Example + /// + /// ```rust + /// let v: Vec<&str> = "Mary had a little lamb".rsplitn(2, ' ').collect(); + /// assert_eq!(v, vec!["lamb", "little", "Mary had a"]); + /// + /// let v: Vec<&str> = "abc1def2ghi".rsplitn(1, |&: c: char| c.is_numeric()).collect(); + /// assert_eq!(v, vec!["ghi", "abc1def"]); + /// + /// let v: Vec<&str> = "lionXXtigerXleopard".rsplitn(2, 'X').collect(); + /// assert_eq!(v, vec!["leopard", "tiger", "lionX"]); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn rsplitn(&self, count: uint, sep: Sep) -> CharSplitsN { + core_str::StrExt::rsplitn(self[], count, sep) + } + + /// An iterator over the start and end indices of the disjoint + /// matches of `sep` within `self`. + /// + /// That is, each returned value `(start, end)` satisfies + /// `self.slice(start, end) == sep`. For matches of `sep` within + /// `self` that overlap, only the indices corresponding to the + /// first match are returned. 
+ /// + /// # Example + /// + /// ```rust + /// let v: Vec<(uint, uint)> = "abcXXXabcYYYabc".match_indices("abc").collect(); + /// assert_eq!(v, vec![(0,3), (6,9), (12,15)]); + /// + /// let v: Vec<(uint, uint)> = "1abcabc2".match_indices("abc").collect(); + /// assert_eq!(v, vec![(1,4), (4,7)]); + /// + /// let v: Vec<(uint, uint)> = "ababa".match_indices("aba").collect(); + /// assert_eq!(v, vec![(0, 3)]); // only the first `aba` + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn match_indices<'a>(&'a self, sep: &'a str) -> MatchIndices<'a> { + core_str::StrExt::match_indices(self[], sep) + } + + /// An iterator over the substrings of `self` separated by `sep`. + /// + /// # Example + /// + /// ```rust + /// let v: Vec<&str> = "abcXXXabcYYYabc".split_str("abc").collect(); + /// assert_eq!(v, vec!["", "XXX", "YYY", ""]); + /// + /// let v: Vec<&str> = "1abcabc2".split_str("abc").collect(); + /// assert_eq!(v, vec!["1", "", "2"]); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn split_str<'a>(&'a self, s: &'a str) -> StrSplits<'a> { + core_str::StrExt::split_str(self[], s) + } + + /// An iterator over the lines of a string (subsequences separated + /// by `\n`). This does not include the empty string after a + /// trailing `\n`. + /// + /// # Example + /// + /// ```rust + /// let four_lines = "foo\nbar\n\nbaz\n"; + /// let v: Vec<&str> = four_lines.lines().collect(); + /// assert_eq!(v, vec!["foo", "bar", "", "baz"]); + /// ``` + #[stable] + fn lines(&self) -> Lines { + core_str::StrExt::lines(self[]) + } + + /// An iterator over the lines of a string, separated by either + /// `\n` or `\r\n`. As with `.lines()`, this does not include an + /// empty trailing line. + /// + /// # Example + /// + /// ```rust + /// let four_lines = "foo\r\nbar\n\r\nbaz\n"; + /// let v: Vec<&str> = four_lines.lines_any().collect(); + /// assert_eq!(v, vec!["foo", "bar", "", "baz"]); + /// ``` + #[stable] + fn lines_any(&self) -> LinesAny { + core_str::StrExt::lines_any(self[]) + } + + /// Returns the number of Unicode code points (`char`) that a + /// string holds. + /// + /// This does not perform any normalization, and is `O(n)`, since + /// UTF-8 is a variable width encoding of code points. + /// + /// *Warning*: The number of code points in a string does not directly + /// correspond to the number of visible characters or width of the + /// visible text due to composing characters, and double- and + /// zero-width ones. + /// + /// See also `.len()` for the byte length. + /// + /// # Example + /// + /// ```rust + /// # #![allow(deprecated)] + /// // composed forms of `ö` and `é` + /// let c = "Löwe 老虎 Léopard"; // German, Simplified Chinese, French + /// // decomposed forms of `ö` and `é` + /// let d = "Lo\u{0308}we 老虎 Le\u{0301}opard"; + /// + /// assert_eq!(c.char_len(), 15); + /// assert_eq!(d.char_len(), 17); + /// + /// assert_eq!(c.len(), 21); + /// assert_eq!(d.len(), 23); + /// + /// // the two strings *look* the same + /// println!("{}", c); + /// println!("{}", d); + /// ``` + #[deprecated = "call .chars().count() instead"] + fn char_len(&self) -> uint { + core_str::StrExt::char_len(self[]) + } + + /// Returns a slice of the given string from the byte range + /// [`begin`..`end`). + /// + /// This operation is `O(1)`. + /// + /// Panics when `begin` and `end` do not point to valid characters + /// or point beyond the last character of the string. 
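Since `char_len` is deprecated above in favour of `.chars().count()`, here is a short sketch of the byte-length versus code-point distinction, reusing the figures from the doc example (composed forms of `ö` and `é`):

```rust
fn main() {
    let s = "Löwe 老虎 Léopard";
    // `len()` counts bytes of the UTF-8 encoding...
    assert_eq!(s.len(), 21);
    // ...while the suggested replacement for `char_len()` counts code points.
    assert_eq!(s.chars().count(), 15);
}
```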
+ /// + /// See also `slice_to` and `slice_from` for slicing prefixes and + /// suffixes of strings, and `slice_chars` for slicing based on + /// code point counts. + /// + /// # Example + /// + /// ```rust + /// let s = "Löwe 老虎 Léopard"; + /// assert_eq!(s.slice(0, 1), "L"); + /// + /// assert_eq!(s.slice(1, 9), "öwe 老"); + /// + /// // these will panic: + /// // byte 2 lies within `ö`: + /// // s.slice(2, 3); + /// + /// // byte 8 lies within `老` + /// // s.slice(1, 8); + /// + /// // byte 100 is outside the string + /// // s.slice(3, 100); + /// ``` + #[unstable = "use slice notation [a..b] instead"] + fn slice(&self, begin: uint, end: uint) -> &str { + core_str::StrExt::slice(self[], begin, end) + } + + /// Returns a slice of the string from `begin` to its end. + /// + /// Equivalent to `self.slice(begin, self.len())`. + /// + /// Panics when `begin` does not point to a valid character, or is + /// out of bounds. + /// + /// See also `slice`, `slice_to` and `slice_chars`. + #[unstable = "use slice notation [a..] instead"] + fn slice_from(&self, begin: uint) -> &str { + core_str::StrExt::slice_from(self[], begin) + } + + /// Returns a slice of the string from the beginning to byte + /// `end`. + /// + /// Equivalent to `self.slice(0, end)`. + /// + /// Panics when `end` does not point to a valid character, or is + /// out of bounds. + /// + /// See also `slice`, `slice_from` and `slice_chars`. + #[unstable = "use slice notation [0..a] instead"] + fn slice_to(&self, end: uint) -> &str { + core_str::StrExt::slice_to(self[], end) + } + + /// Returns a slice of the string from the character range + /// [`begin`..`end`). + /// + /// That is, start at the `begin`-th code point of the string and + /// continue to the `end`-th code point. This does not detect or + /// handle edge cases such as leaving a combining character as the + /// first code point of the string. + /// + /// Due to the design of UTF-8, this operation is `O(end)`. + /// See `slice`, `slice_to` and `slice_from` for `O(1)` + /// variants that use byte indices rather than code point + /// indices. + /// + /// Panics if `begin` > `end`, or if either `begin` or `end` is + /// beyond the last character of the string. + /// + /// # Example + /// + /// ```rust + /// let s = "Löwe 老虎 Léopard"; + /// assert_eq!(s.slice_chars(0, 4), "Löwe"); + /// assert_eq!(s.slice_chars(5, 7), "老虎"); + /// ``` + #[unstable = "may have yet to prove its worth"] + fn slice_chars(&self, begin: uint, end: uint) -> &str { + core_str::StrExt::slice_chars(self[], begin, end) + } + + /// Takes a bytewise (not UTF-8) slice from a string. + /// + /// Returns the substring from [`begin`..`end`). + /// + /// Caller must check both UTF-8 character boundaries and the boundaries of + /// the entire slice as well. + #[stable] + unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str { + core_str::StrExt::slice_unchecked(self[], begin, end) + } + + /// Returns true if `needle` is a prefix of the string. + /// + /// # Example + /// + /// ```rust + /// assert!("banana".starts_with("ba")); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn starts_with(&self, needle: &str) -> bool { + core_str::StrExt::starts_with(self[], needle) + } + + /// Returns true if `needle` is a suffix of the string.
+ /// + /// # Example + /// + /// ```rust + /// assert!("banana".ends_with("nana")); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn ends_with(&self, needle: &str) -> bool { + core_str::StrExt::ends_with(self[], needle) + } + + /// Returns a string with characters that match `to_trim` removed from the left and the right. + /// + /// # Arguments + /// + /// * to_trim - a character matcher + /// + /// # Example + /// + /// ```rust + /// assert_eq!("11foo1bar11".trim_chars('1'), "foo1bar"); + /// let x: &[_] = &['1', '2']; + /// assert_eq!("12foo1bar12".trim_chars(x), "foo1bar"); + /// assert_eq!("123foo1bar123".trim_chars(|&: c: char| c.is_numeric()), "foo1bar"); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn trim_chars(&self, to_trim: C) -> &str { + core_str::StrExt::trim_chars(self[], to_trim) + } + + /// Returns a string with leading `chars_to_trim` removed. + /// + /// # Arguments + /// + /// * to_trim - a character matcher + /// + /// # Example + /// + /// ```rust + /// assert_eq!("11foo1bar11".trim_left_chars('1'), "foo1bar11"); + /// let x: &[_] = &['1', '2']; + /// assert_eq!("12foo1bar12".trim_left_chars(x), "foo1bar12"); + /// assert_eq!("123foo1bar123".trim_left_chars(|&: c: char| c.is_numeric()), "foo1bar123"); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn trim_left_chars(&self, to_trim: C) -> &str { + core_str::StrExt::trim_left_chars(self[], to_trim) + } + + /// Returns a string with trailing `chars_to_trim` removed. + /// + /// # Arguments + /// + /// * to_trim - a character matcher + /// + /// # Example + /// + /// ```rust + /// assert_eq!("11foo1bar11".trim_right_chars('1'), "11foo1bar"); + /// let x: &[_] = &['1', '2']; + /// assert_eq!("12foo1bar12".trim_right_chars(x), "12foo1bar"); + /// assert_eq!("123foo1bar123".trim_right_chars(|&: c: char| c.is_numeric()), "123foo1bar"); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn trim_right_chars(&self, to_trim: C) -> &str { + core_str::StrExt::trim_right_chars(self[], to_trim) + } + + /// Check that `index`-th byte lies at the start and/or end of a + /// UTF-8 code point sequence. + /// + /// The start and end of the string (when `index == self.len()`) + /// are considered to be boundaries. + /// + /// Panics if `index` is greater than `self.len()`. + /// + /// # Example + /// + /// ```rust + /// let s = "Löwe 老虎 Léopard"; + /// assert!(s.is_char_boundary(0)); + /// // start of `老` + /// assert!(s.is_char_boundary(6)); + /// assert!(s.is_char_boundary(s.len())); + /// + /// // second byte of `ö` + /// assert!(!s.is_char_boundary(2)); + /// + /// // third byte of `老` + /// assert!(!s.is_char_boundary(8)); + /// ``` + #[unstable = "naming is uncertain with container conventions"] + fn is_char_boundary(&self, index: uint) -> bool { + core_str::StrExt::is_char_boundary(self[], index) + } + + /// Pluck a character out of a string and return the index of the next + /// character. + /// + /// This function can be used to iterate over the Unicode characters of a + /// string. + /// + /// # Example + /// + /// This example manually iterates through the characters of a + /// string; this should normally be done by `.chars()` or + /// `.char_indices`. 
+ /// + /// ```rust + /// use std::str::CharRange; + /// + /// let s = "中华Việt Nam"; + /// let mut i = 0u; + /// while i < s.len() { + /// let CharRange {ch, next} = s.char_range_at(i); + /// println!("{}: {}", i, ch); + /// i = next; + /// } + /// ``` + /// + /// This outputs: + /// + /// ```text + /// 0: 中 + /// 3: 华 + /// 6: V + /// 7: i + /// 8: ệ + /// 11: t + /// 12: + /// 13: N + /// 14: a + /// 15: m + /// ``` + /// + /// # Arguments + /// + /// * s - The string + /// * i - The byte offset of the char to extract + /// + /// # Return value + /// + /// A record {ch: char, next: uint} containing the char value and the byte + /// index of the next Unicode character. + /// + /// # Panics + /// + /// If `i` is greater than or equal to the length of the string. + /// If `i` is not the index of the beginning of a valid UTF-8 character. + #[unstable = "naming is uncertain with container conventions"] + fn char_range_at(&self, start: uint) -> CharRange { + core_str::StrExt::char_range_at(self[], start) + } + + /// Given a byte position and a str, return the previous char and its position. + /// + /// This function can be used to iterate over a Unicode string in reverse. + /// + /// Returns 0 for next index if called on start index 0. + /// + /// # Panics + /// + /// If `i` is greater than the length of the string. + /// If `i` is not an index following a valid UTF-8 character. + #[unstable = "naming is uncertain with container conventions"] + fn char_range_at_reverse(&self, start: uint) -> CharRange { + core_str::StrExt::char_range_at_reverse(self[], start) + } + + /// Plucks the character starting at the `i`th byte of a string. + /// + /// # Example + /// + /// ```rust + /// let s = "abπc"; + /// assert_eq!(s.char_at(1), 'b'); + /// assert_eq!(s.char_at(2), 'π'); + /// assert_eq!(s.char_at(4), 'c'); + /// ``` + /// + /// # Panics + /// + /// If `i` is greater than or equal to the length of the string. + /// If `i` is not the index of the beginning of a valid UTF-8 character. + #[unstable = "naming is uncertain with container conventions"] + fn char_at(&self, i: uint) -> char { + core_str::StrExt::char_at(self[], i) + } + + /// Plucks the character ending at the `i`th byte of a string. + /// + /// # Panics + /// + /// If `i` is greater than the length of the string. + /// If `i` is not an index following a valid UTF-8 character. + #[unstable = "naming is uncertain with container conventions"] + fn char_at_reverse(&self, i: uint) -> char { + core_str::StrExt::char_at_reverse(self[], i) + } + + /// Work with the byte buffer of a string as a byte slice. + /// + /// # Example + /// + /// ```rust + /// assert_eq!("bors".as_bytes(), b"bors"); + /// ``` + #[stable] + fn as_bytes(&self) -> &[u8] { + core_str::StrExt::as_bytes(self[]) + } + + /// Returns the byte index of the first character of `self` that + /// matches `search`. 
+ /// + /// # Return value + /// + /// `Some` containing the byte index of the first matching character + /// or `None` if there is no match. + /// + /// # Example + /// + /// ```rust + /// let s = "Löwe 老虎 Léopard"; + /// + /// assert_eq!(s.find('L'), Some(0)); + /// assert_eq!(s.find('é'), Some(14)); + /// + /// // the first space + /// assert_eq!(s.find(|&: c: char| c.is_whitespace()), Some(5)); + /// + /// // neither are found + /// let x: &[_] = &['1', '2']; + /// assert_eq!(s.find(x), None); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn find(&self, search: C) -> Option { + core_str::StrExt::find(self[], search) + } + + /// Returns the byte index of the last character of `self` that + /// matches `search`. + /// + /// # Return value + /// + /// `Some` containing the byte index of the last matching character + /// or `None` if there is no match. + /// + /// # Example + /// + /// ```rust + /// let s = "Löwe 老虎 Léopard"; + /// + /// assert_eq!(s.rfind('L'), Some(13)); + /// assert_eq!(s.rfind('é'), Some(14)); + /// + /// // the second space + /// assert_eq!(s.rfind(|&: c: char| c.is_whitespace()), Some(12)); + /// + /// // searches for an occurrence of either `1` or `2`, but neither are found + /// let x: &[_] = &['1', '2']; + /// assert_eq!(s.rfind(x), None); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn rfind(&self, search: C) -> Option { + core_str::StrExt::rfind(self[], search) + } + + /// Returns the byte index of the first matching substring + /// + /// # Arguments + /// + /// * `needle` - The string to search for + /// + /// # Return value + /// + /// `Some` containing the byte index of the first matching substring + /// or `None` if there is no match. + /// + /// # Example + /// + /// ```rust + /// let s = "Löwe 老虎 Léopard"; + /// + /// assert_eq!(s.find_str("老虎 L"), Some(6)); + /// assert_eq!(s.find_str("muffin man"), None); + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn find_str(&self, needle: &str) -> Option { + core_str::StrExt::find_str(self[], needle) + } + + /// Retrieves the first character from a string slice and returns + /// it. This does not allocate a new string; instead, it returns a + /// slice that points one character beyond the character that was + /// shifted. If the string does not contain any characters, + /// None is returned instead. + /// + /// # Example + /// + /// ```rust + /// let s = "Löwe 老虎 Léopard"; + /// let (c, s1) = s.slice_shift_char().unwrap(); + /// assert_eq!(c, 'L'); + /// assert_eq!(s1, "öwe 老虎 Léopard"); + /// + /// let (c, s2) = s1.slice_shift_char().unwrap(); + /// assert_eq!(c, 'ö'); + /// assert_eq!(s2, "we 老虎 Léopard"); + /// ``` + #[unstable = "awaiting conventions about shifting and slices"] + fn slice_shift_char(&self) -> Option<(char, &str)> { + core_str::StrExt::slice_shift_char(self[]) + } + + /// Returns the byte offset of an inner slice relative to an enclosing outer slice. + /// + /// Panics if `inner` is not a direct slice contained within self.
+ /// + /// # Example + /// + /// ```rust + /// let string = "a\nb\nc"; + /// let lines: Vec<&str> = string.lines().collect(); + /// + /// assert!(string.subslice_offset(lines[0]) == 0); // &"a" + /// assert!(string.subslice_offset(lines[1]) == 2); // &"b" + /// assert!(string.subslice_offset(lines[2]) == 4); // &"c" + /// ``` + #[unstable = "awaiting pattern/matcher stabilization"] + fn subslice_offset(&self, inner: &str) -> uint { + core_str::StrExt::subslice_offset(self[], inner) + } + + /// Return an unsafe pointer to the strings buffer. + /// + /// The caller must ensure that the string outlives this pointer, + /// and that it is not reallocated (e.g. by pushing to the + /// string). + #[stable] #[inline] - fn into_string(self) -> String { - String::from_str(self) + fn as_ptr(&self) -> *const u8 { + core_str::StrExt::as_ptr(self[]) + } + + /// Return an iterator of `u16` over the string encoded as UTF-16. + #[unstable = "this functionality may only be provided by libunicode"] + fn utf16_units(&self) -> Utf16Units { + Utf16Units { encoder: Utf16Encoder::new(self[].chars()) } + } + + /// Return the number of bytes in this string + /// + /// # Example + /// + /// ``` + /// assert_eq!("foo".len(), 3); + /// assert_eq!("ƒoo".len(), 4); + /// ``` + #[stable] + #[inline] + fn len(&self) -> uint { + core_str::StrExt::len(self[]) + } + + /// Returns true if this slice contains no bytes + /// + /// # Example + /// + /// ``` + /// assert!("".is_empty()); + /// ``` + #[inline] + #[stable] + fn is_empty(&self) -> bool { + core_str::StrExt::is_empty(self[]) + } + + /// Parse this string into the specified type. + /// + /// # Example + /// + /// ``` + /// assert_eq!("4".parse::(), Some(4)); + /// assert_eq!("j".parse::(), None); + /// ``` + #[inline] + #[unstable = "this method was just created"] + fn parse(&self) -> Option { + FromStr::from_str(self[]) + } + + /// Returns an iterator over the + /// [grapheme clusters](http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries) + /// of the string. + /// + /// If `is_extended` is true, the iterator is over the *extended grapheme clusters*; + /// otherwise, the iterator is over the *legacy grapheme clusters*. + /// [UAX#29](http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries) + /// recommends extended grapheme cluster boundaries for general processing. + /// + /// # Example + /// + /// ```rust + /// let gr1 = "a\u{310}e\u{301}o\u{308}\u{332}".graphemes(true).collect::>(); + /// let b: &[_] = &["a\u{310}", "e\u{301}", "o\u{308}\u{332}"]; + /// assert_eq!(gr1.as_slice(), b); + /// let gr2 = "a\r\nb🇷🇺🇸🇹".graphemes(true).collect::>(); + /// let b: &[_] = &["a", "\r\n", "b", "🇷🇺🇸🇹"]; + /// assert_eq!(gr2.as_slice(), b); + /// ``` + #[unstable = "this functionality may only be provided by libunicode"] + fn graphemes(&self, is_extended: bool) -> Graphemes { + UnicodeStr::graphemes(self[], is_extended) + } + + /// Returns an iterator over the grapheme clusters of self and their byte offsets. + /// See `graphemes()` method for more information. 
+ /// + /// # Example + /// + /// ```rust + /// let gr_inds = "a̐éö̲\r\n".grapheme_indices(true).collect::>(); + /// let b: &[_] = &[(0u, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")]; + /// assert_eq!(gr_inds.as_slice(), b); + /// ``` + #[unstable = "this functionality may only be provided by libunicode"] + fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices { + UnicodeStr::grapheme_indices(self[], is_extended) + } + + /// An iterator over the words of a string (subsequences separated + /// by any sequence of whitespace). Sequences of whitespace are + /// collapsed, so empty "words" are not included. + /// + /// # Example + /// + /// ```rust + /// let some_words = " Mary had\ta little \n\t lamb"; + /// let v: Vec<&str> = some_words.words().collect(); + /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]); + /// ``` + #[stable] + fn words(&self) -> Words { + UnicodeStr::words(self[]) + } + + /// Returns true if the string contains only whitespace. + /// + /// Whitespace characters are determined by `char::is_whitespace`. + /// + /// # Example + /// + /// ```rust + /// # #![allow(deprecated)] + /// assert!(" \t\n".is_whitespace()); + /// assert!("".is_whitespace()); + /// + /// assert!( !"abc".is_whitespace()); + /// ``` + #[deprecated = "use .chars().all(|c| c.is_whitespace())"] + fn is_whitespace(&self) -> bool { + UnicodeStr::is_whitespace(self[]) + } + + /// Returns true if the string contains only alphanumeric code + /// points. + /// + /// Alphanumeric characters are determined by `char::is_alphanumeric`. + /// + /// # Example + /// + /// ```rust + /// # #![allow(deprecated)] + /// assert!("Löwe老虎Léopard123".is_alphanumeric()); + /// assert!("".is_alphanumeric()); + /// + /// assert!( !" &*~".is_alphanumeric()); + /// ``` + #[deprecated = "use .chars().all(|c| c.is_alphanumeric())"] + fn is_alphanumeric(&self) -> bool { + UnicodeStr::is_alphanumeric(self[]) + } + + /// Returns a string's displayed width in columns, treating control + /// characters as zero-width. + /// + /// `is_cjk` determines behavior for characters in the Ambiguous category: + /// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1. + /// In CJK locales, `is_cjk` should be `true`, else it should be `false`. + /// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/) + /// recommends that these characters be treated as 1 column (i.e., + /// `is_cjk` = `false`) if the locale is unknown. + #[unstable = "this functionality may only be provided by libunicode"] + fn width(&self, is_cjk: bool) -> uint { + UnicodeStr::width(self[], is_cjk) + } + + /// Returns a string with leading and trailing whitespace removed. + #[stable] + fn trim(&self) -> &str { + UnicodeStr::trim(self[]) + } + + /// Returns a string with leading whitespace removed. + #[stable] + fn trim_left(&self) -> &str { + UnicodeStr::trim_left(self[]) + } + + /// Returns a string with trailing whitespace removed. + #[stable] + fn trim_right(&self) -> &str { + UnicodeStr::trim_right(self[]) + } + + /// Deprecated, call `.to_owned()` instead from the `std::borrow::ToOwned` + /// trait. 
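As a companion to the `graphemes`/`grapheme_indices` docs above (whose examples also lost their `Vec` type parameters in this copy), a minimal sketch of extended grapheme clustering, assuming the libunicode-backed methods are in scope via the prelude:

```rust
fn main() {
    // Each base character is grouped with its combining mark into one cluster.
    let clusters: Vec<&str> = "a\u{310}e\u{301}".graphemes(true).collect();
    assert_eq!(clusters, vec!["a\u{310}", "e\u{301}"]);
}
```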
+ #[deprecated = "call `.to_owned()` on `std::borrow::ToOwned` instead"] + fn into_string(&self) -> String { + self[].to_owned() } } +impl StrExt for str {} + #[cfg(test)] mod tests { use std::iter::AdditiveIterator; @@ -841,26 +1752,20 @@ mod tests { use std::clone::Clone; use std::cmp::{Ord, PartialOrd, Equiv}; use std::cmp::Ordering::{Equal, Greater, Less}; - use std::option::Option; - use std::option::Option::{Some, None}; + use std::option::Option::{mod, Some, None}; + use std::result::Result::{Ok, Err}; use std::ptr::RawPtr; use std::iter::{Iterator, IteratorExt, DoubleEndedIteratorExt}; use super::*; + use super::MaybeOwned::{Owned, Slice}; use std::slice::{AsSlice, SliceExt}; - use string::String; + use string::{String, ToString}; use vec::Vec; use slice::CloneSliceExt; use unicode::char::UnicodeChar; - #[test] - fn test_eq_slice() { - assert!((eq_slice("foobar".slice(0, 3), "foo"))); - assert!((eq_slice("barfoo".slice(3, 6), "foo"))); - assert!((!eq_slice("foo1", "foo2"))); - } - #[test] fn test_le() { assert!("" <= ""); @@ -1390,6 +2295,7 @@ mod tests { #[test] fn test_is_utf16() { + use unicode::str::is_utf16; macro_rules! pos ( ($($e:expr),*) => { { $(assert!(is_utf16($e));)* } }); // non-surrogates @@ -1551,28 +2457,6 @@ mod tests { assert!(!"".contains_char('a')); } - #[test] - fn test_truncate_utf16_at_nul() { - let v = []; - let b: &[u16] = &[]; - assert_eq!(truncate_utf16_at_nul(&v), b); - - let v = [0, 2, 3]; - assert_eq!(truncate_utf16_at_nul(&v), b); - - let v = [1, 0, 3]; - let b: &[u16] = &[1]; - assert_eq!(truncate_utf16_at_nul(&v), b); - - let v = [1, 2, 0]; - let b: &[u16] = &[1, 2]; - assert_eq!(truncate_utf16_at_nul(&v), b); - - let v = [1, 2, 3]; - let b: &[u16] = &[1, 2, 3]; - assert_eq!(truncate_utf16_at_nul(&v), b); - } - #[test] fn test_char_at() { let s = "ศไทย中华Việt Nam"; @@ -1824,27 +2708,6 @@ mod tests { assert_eq!(words, vec!["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"]) } - #[test] - fn test_lev_distance() { - use std::char::{ from_u32, MAX }; - // Test bytelength agnosticity - for c in range(0u32, MAX as u32) - .filter_map(|i| from_u32(i)) - .map(|i| String::from_char(1, i)) { - assert_eq!(c[].lev_distance(c[]), 0); - } - - let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; - let b = "\nMary häd ä little lämb\n\nLittle lämb\n"; - let c = "Mary häd ä little lämb\n\nLittle lämb\n"; - assert_eq!(a.lev_distance(b), 1); - assert_eq!(b.lev_distance(a), 1); - assert_eq!(a.lev_distance(c), 2); - assert_eq!(c.lev_distance(a), 2); - assert_eq!(b.lev_distance(c), 1); - assert_eq!(c.lev_distance(b), 1); - } - #[test] fn test_nfd_chars() { macro_rules! t { @@ -1936,7 +2799,7 @@ mod tests { #[test] fn test_graphemes() { - use std::iter::order; + use core::iter::order; // official Unicode test data // from http://www.unicode.org/Public/UCD/latest/ucd/auxiliary/GraphemeBreakTest.txt let test_same: [(_, &[_]), .. 
325] = [ @@ -2367,7 +3230,7 @@ mod tests { #[test] fn test_str_default() { - use std::default::Default; + use core::default::Default; fn t() { let s: S = Default::default(); assert_eq!(s.as_slice(), ""); @@ -2395,13 +3258,13 @@ mod tests { #[test] fn test_str_from_utf8() { let xs = b"hello"; - assert_eq!(from_utf8(xs), Some("hello")); + assert_eq!(from_utf8(xs), Ok("hello")); let xs = "ศไทย中华Việt Nam".as_bytes(); - assert_eq!(from_utf8(xs), Some("ศไทย中华Việt Nam")); + assert_eq!(from_utf8(xs), Ok("ศไทย中华Việt Nam")); let xs = b"hello\xFF"; - assert_eq!(from_utf8(xs), None); + assert_eq!(from_utf8(xs), Err(Utf8Error::TooShort)); } #[test] @@ -2450,8 +3313,8 @@ mod tests { #[test] fn test_maybe_owned_into_string() { - assert_eq!(Slice("abcde").into_string(), String::from_str("abcde")); - assert_eq!(Owned(String::from_str("abcde")).into_string(), + assert_eq!(Slice("abcde").to_string(), String::from_str("abcde")); + assert_eq!(Owned(String::from_str("abcde")).to_string(), String::from_str("abcde")); } @@ -2467,12 +3330,10 @@ mod tests { #[cfg(test)] mod bench { + use prelude::*; use test::Bencher; use test::black_box; use super::*; - use std::iter::{IteratorExt, DoubleEndedIteratorExt}; - use std::str::StrPrelude; - use std::slice::SliceExt; #[bench] fn char_iterator(b: &mut Bencher) { diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 38ebd686ddbdd..6c2659b13f720 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -21,12 +21,12 @@ use core::hash; use core::mem; use core::ptr; use core::ops; -// FIXME: ICE's abound if you import the `Slice` type while importing `Slice` trait use core::raw::Slice as RawSlice; +use unicode::str as unicode_str; +use unicode::str::Utf16Item; use slice::CloneSliceExt; -use str; -use str::{CharRange, CowString, FromStr, StrAllocating, Owned}; +use str::{mod, CharRange, FromStr, Utf8Error}; use vec::{DerefVec, Vec, as_vec}; /// A growable string stored as a UTF-8 encoded buffer. @@ -86,27 +86,31 @@ impl String { /// Returns the vector as a string buffer, if possible, taking care not to /// copy it. /// - /// Returns `Err` with the original vector if the vector contains invalid - /// UTF-8. + /// # Failure + /// + /// If the given vector is not valid UTF-8, then the original vector and the + /// corresponding error is returned. /// /// # Examples /// /// ```rust + /// # #![allow(deprecated)] + /// use std::str::Utf8Error; + /// /// let hello_vec = vec![104, 101, 108, 108, 111]; /// let s = String::from_utf8(hello_vec); /// assert_eq!(s, Ok("hello".to_string())); /// /// let invalid_vec = vec![240, 144, 128]; /// let s = String::from_utf8(invalid_vec); - /// assert_eq!(s, Err(vec![240, 144, 128])); + /// assert_eq!(s, Err((vec![240, 144, 128], Utf8Error::TooShort))); /// ``` #[inline] #[unstable = "error type may change"] - pub fn from_utf8(vec: Vec) -> Result> { - if str::is_utf8(vec.as_slice()) { - Ok(String { vec: vec }) - } else { - Err(vec) + pub fn from_utf8(vec: Vec) -> Result, Utf8Error)> { + match str::from_utf8(vec.as_slice()) { + Ok(..) => Ok(String { vec: vec }), + Err(e) => Err((vec, e)) } } @@ -122,8 +126,9 @@ impl String { /// ``` #[unstable = "return type may change"] pub fn from_utf8_lossy<'a>(v: &'a [u8]) -> CowString<'a> { - if str::is_utf8(v) { - return Cow::Borrowed(unsafe { mem::transmute(v) }) + match str::from_utf8(v) { + Ok(s) => return Cow::Borrowed(s), + Err(..) 
=> {} } static TAG_CONT_U8: u8 = 128u8; @@ -172,7 +177,7 @@ impl String { if byte < 128u8 { // subseqidx handles this } else { - let w = str::utf8_char_width(byte); + let w = unicode_str::utf8_char_width(byte); match w { 2 => { @@ -234,7 +239,7 @@ impl String { res.as_mut_vec().push_all(v[subseqidx..total]) }; } - Cow::Owned(res.into_string()) + Cow::Owned(res) } /// Decode a UTF-16 encoded vector `v` into a `String`, returning `None` @@ -255,10 +260,10 @@ impl String { #[unstable = "error value in return may change"] pub fn from_utf16(v: &[u16]) -> Option { let mut s = String::with_capacity(v.len()); - for c in str::utf16_items(v) { + for c in unicode_str::utf16_items(v) { match c { - str::ScalarValue(c) => s.push(c), - str::LoneSurrogate(_) => return None + Utf16Item::ScalarValue(c) => s.push(c), + Utf16Item::LoneSurrogate(_) => return None } } Some(s) @@ -280,7 +285,7 @@ impl String { /// ``` #[stable] pub fn from_utf16_lossy(v: &[u16]) -> String { - str::utf16_items(v).map(|c| c.to_char_lossy()).collect() + unicode_str::utf16_items(v).map(|c| c.to_char_lossy()).collect() } /// Convert a vector of `char`s to a `String`. @@ -512,6 +517,11 @@ impl String { #[inline] #[stable = "function just renamed from push_char"] pub fn push(&mut self, ch: char) { + if (ch as u32) < 0x80 { + self.vec.push(ch as u8); + return; + } + let cur_len = self.len(); // This may use up to 4 bytes. self.vec.reserve(4); @@ -806,21 +816,12 @@ impl<'a, 'b> PartialEq> for &'b str { } #[experimental = "waiting on Str stabilization"] +#[allow(deprecated)] impl Str for String { #[inline] #[stable] fn as_slice<'a>(&'a self) -> &'a str { - unsafe { - mem::transmute(self.vec.as_slice()) - } - } -} - -#[experimental = "waiting on StrAllocating stabilization"] -impl StrAllocating for String { - #[inline] - fn into_string(self) -> String { - self + unsafe { mem::transmute(self.vec.as_slice()) } } } @@ -835,7 +836,7 @@ impl Default for String { #[experimental = "waiting on Show stabilization"] impl fmt::Show for String { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.as_slice().fmt(f) + (**self).fmt(f) } } @@ -843,7 +844,7 @@ impl fmt::Show for String { impl hash::Hash for String { #[inline] fn hash(&self, hasher: &mut H) { - self.as_slice().hash(hasher) + (**self).hash(hasher) } } @@ -856,28 +857,7 @@ impl<'a, S: Str> Equiv for String { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] #[experimental = "waiting on Add stabilization"] -impl Add for String { - /// Concatenates `self` and `other` as a new mutable `String`. 
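The new single-byte fast path in `String::push` above is an internal detail and does not change observable behaviour; a quick sketch of what the benchmarks added later in this patch are exercising:

```rust
fn main() {
    let mut s = String::new();
    s.push('a'); // one-byte code point, handled by the new early return
    s.push('â'); // two-byte UTF-8 sequence, handled by the general path
    assert_eq!(s.len(), 3);
    assert_eq!(s, "aâ".to_string());
}
```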
- /// - /// # Examples - /// - /// ``` - /// let string1 = "foo".to_string(); - /// let string2 = "bar".to_string(); - /// let string3 = string1 + string2; - /// assert_eq!(string3, "foobar".to_string()); - /// ``` - fn add(&self, other: &S) -> String { - let mut s = String::from_str(self.as_slice()); - s.push_str(other.as_slice()); - return s; - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl<'a> Add<&'a str, String> for String { fn add(mut self, other: &str) -> String { self.push_str(other); @@ -885,18 +865,10 @@ impl<'a> Add<&'a str, String> for String { } } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot -impl<'a> Add for &'a str { - fn add(self, mut other: String) -> String { - other.push_str(self); - other - } -} - impl ops::Slice for String { #[inline] fn as_slice_<'a>(&'a self) -> &'a str { - self.as_slice() + unsafe { mem::transmute(self.vec.as_slice()) } } #[inline] @@ -917,7 +889,9 @@ impl ops::Slice for String { #[experimental = "waiting on Deref stabilization"] impl ops::Deref for String { - fn deref<'a>(&'a self) -> &'a str { self.as_slice() } + fn deref<'a>(&'a self) -> &'a str { + unsafe { mem::transmute(self.vec[]) } + } } /// Wrapper type providing a `&String` reference via `Deref`. @@ -1038,16 +1012,26 @@ pub mod raw { } } +/// A clone-on-write string +#[stable] +pub type CowString<'a> = Cow<'a, String, str>; + +#[allow(deprecated)] +impl<'a> Str for CowString<'a> { + #[inline] + fn as_slice<'b>(&'b self) -> &'b str { + (**self).as_slice() + } +} + #[cfg(test)] mod tests { - use std::prelude::*; + use prelude::*; use test::Bencher; - use slice::CloneSliceExt; - use str::{Str, StrPrelude}; + use str::{StrExt, Utf8Error}; use str; - use super::{as_string, String, ToString}; - use vec::Vec; + use super::as_string; #[test] fn test_as_string() { @@ -1064,14 +1048,16 @@ mod tests { #[test] fn test_from_utf8() { let xs = b"hello".to_vec(); - assert_eq!(String::from_utf8(xs), Ok(String::from_str("hello"))); + assert_eq!(String::from_utf8(xs), + Ok(String::from_str("hello"))); let xs = "ศไทย中华Việt Nam".as_bytes().to_vec(); - assert_eq!(String::from_utf8(xs), Ok(String::from_str("ศไทย中华Việt Nam"))); + assert_eq!(String::from_utf8(xs), + Ok(String::from_str("ศไทย中华Việt Nam"))); let xs = b"hello\xFF".to_vec(); assert_eq!(String::from_utf8(xs), - Err(b"hello\xFF".to_vec())); + Err((b"hello\xFF".to_vec(), Utf8Error::TooShort))); } #[test] @@ -1161,7 +1147,7 @@ mod tests { let s_as_utf16 = s.utf16_units().collect::>(); let u_as_string = String::from_utf16(u.as_slice()).unwrap(); - assert!(str::is_utf16(u.as_slice())); + assert!(::unicode::str::is_utf16(u.as_slice())); assert_eq!(s_as_utf16, u); assert_eq!(u_as_string, s); @@ -1411,6 +1397,41 @@ mod tests { }); } + const REPETITIONS: u64 = 10_000; + + #[bench] + fn bench_push_str_one_byte(b: &mut Bencher) { + b.bytes = REPETITIONS; + b.iter(|| { + let mut r = String::new(); + for _ in range(0, REPETITIONS) { + r.push_str("a") + } + }); + } + + #[bench] + fn bench_push_char_one_byte(b: &mut Bencher) { + b.bytes = REPETITIONS; + b.iter(|| { + let mut r = String::new(); + for _ in range(0, REPETITIONS) { + r.push('a') + } + }); + } + + #[bench] + fn bench_push_char_two_bytes(b: &mut Bencher) { + b.bytes = REPETITIONS * 2; + b.iter(|| { + let mut r = String::new(); + for _ in range(0, REPETITIONS) { + r.push('â') + } + }); + } + #[bench] fn from_utf8_lossy_100_ascii(b: &mut Bencher) { let s = b"Hello there, the quick brown fox jumped over the lazy dog! 
\ diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index e986b2044306f..fa0e4a2340e2a 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -53,8 +53,8 @@ use core::cmp::max; use core::default::Default; use core::fmt; use core::hash::{mod, Hash}; +use core::iter::repeat; use core::kinds::marker::{ContravariantLifetime, InvariantType}; -use core::kinds::Sized; use core::mem; use core::num::{Int, UnsignedInt}; use core::ops; @@ -412,6 +412,33 @@ impl Vec { } } + /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`. + /// + /// Calls either `extend()` or `truncate()` depending on whether `new_len` + /// is larger than the current value of `len()` or not. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec!["hello"]; + /// vec.resize(3, "world"); + /// assert_eq!(vec, vec!["hello", "world", "world"]); + /// + /// let mut vec = vec![1i, 2, 3, 4]; + /// vec.resize(2, 0); + /// assert_eq!(vec, vec![1, 2]); + /// ``` + #[unstable = "matches collection reform specification; waiting for dust to settle"] + pub fn resize(&mut self, new_len: uint, value: T) { + let len = self.len(); + + if new_len > len { + self.extend(repeat(value).take(new_len - len)); + } else { + self.truncate(new_len); + } + } + /// Partitions a vector based on a predicate. /// /// Clones the elements of the vector, partitioning them into two `Vec`s @@ -443,7 +470,7 @@ impl Vec { } } -#[unstable] +#[stable] impl Clone for Vec { fn clone(&self) -> Vec { self.as_slice().to_vec() } @@ -861,7 +888,7 @@ impl Vec { /// ``` #[inline] #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> MoveItems { + pub fn into_iter(self) -> IntoIter { unsafe { let ptr = self.ptr; let cap = self.cap; @@ -872,7 +899,7 @@ impl Vec { ptr.offset(self.len() as int) as *const T }; mem::forget(self); - MoveItems { allocation: ptr, cap: cap, ptr: begin, end: end } + IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end } } } @@ -1118,6 +1145,38 @@ impl Vec { } } + /// Creates a draining iterator that clears the `Vec` and iterates over + /// the removed items from start to end. + /// + /// # Examples + /// + /// ``` + /// let mut v = vec!["a".to_string(), "b".to_string()]; + /// for s in v.drain() { + /// // s has type String, not &String + /// println!("{}", s); + /// } + /// assert!(v.is_empty()); + /// ``` + #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn drain<'a>(&'a mut self) -> Drain<'a, T> { + unsafe { + let begin = self.ptr as *const T; + let end = if mem::size_of::() == 0 { + (self.ptr as uint + self.len()) as *const T + } else { + self.ptr.offset(self.len() as int) as *const T + }; + self.set_len(0); + Drain { + ptr: begin, + end: end, + marker: ContravariantLifetime, + } + } + } + /// Clears the vector, removing all values. 
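`drain` above ships with a front-to-back doc example; since the iterator is also double-ended (the tests later in this patch exercise `drain().rev()`), here is a small sketch of consuming it from the back, with illustrative names:

```rust
fn main() {
    let mut v = vec![1i, 2, 3];
    // Draining empties the vector while handing out its elements by value;
    // `rev()` works because `Drain` implements `DoubleEndedIterator`.
    let backwards: Vec<int> = v.drain().rev().collect();
    assert_eq!(backwards, vec![3, 2, 1]);
    assert!(v.is_empty());
}
```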
/// /// # Examples @@ -1303,20 +1362,6 @@ impl AsSlice for Vec { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl> Add> for Vec { - #[inline] - fn add(&self, rhs: &V) -> Vec { - let mut res = Vec::with_capacity(self.len() + rhs.as_slice().len()); - res.push_all(self.as_slice()); - res.push_all(rhs.as_slice()); - res - } -} - - -#[cfg(not(stage0))] // NOTE(stage0): Remove impl after a snapshot impl<'a, T: Clone> Add<&'a [T], Vec> for Vec { #[inline] fn add(mut self, rhs: &[T]) -> Vec { @@ -1325,15 +1370,6 @@ impl<'a, T: Clone> Add<&'a [T], Vec> for Vec { } } -#[cfg(not(stage0))] // NOTE(stage0): Remove impl after a snapshot -impl<'a, T: Clone> Add, Vec> for &'a [T] { - #[inline] - fn add(self, mut rhs: Vec) -> Vec { - rhs.push_all(self); - rhs - } -} - #[unsafe_destructor] impl Drop for Vec { fn drop(&mut self) { @@ -1366,31 +1402,32 @@ impl fmt::Show for Vec { } /// An iterator that moves out of a vector. -pub struct MoveItems { +pub struct IntoIter { allocation: *mut T, // the block of memory allocated for the vector cap: uint, // the capacity of the vector ptr: *const T, end: *const T } -impl MoveItems { - #[inline] +impl IntoIter { /// Drops all items that have not yet been moved and returns the empty vector. + #[inline] + #[unstable] pub fn into_inner(mut self) -> Vec { unsafe { for _x in self { } - let MoveItems { allocation, cap, ptr: _ptr, end: _end } = self; + let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self; mem::forget(self); Vec { ptr: allocation, cap: cap, len: 0 } } } - /// Deprecated, use into_inner() instead - #[deprecated = "renamed to into_inner()"] + /// Deprecated, use .into_inner() instead + #[deprecated = "use .into_inner() instead"] pub fn unwrap(self) -> Vec { self.into_inner() } } -impl Iterator for MoveItems { +impl Iterator for IntoIter { #[inline] fn next<'a>(&'a mut self) -> Option { unsafe { @@ -1424,7 +1461,7 @@ impl Iterator for MoveItems { } } -impl DoubleEndedIterator for MoveItems { +impl DoubleEndedIterator for IntoIter { #[inline] fn next_back<'a>(&'a mut self) -> Option { unsafe { @@ -1447,10 +1484,10 @@ impl DoubleEndedIterator for MoveItems { } } -impl ExactSizeIterator for MoveItems {} +impl ExactSizeIterator for IntoIter {} #[unsafe_destructor] -impl Drop for MoveItems { +impl Drop for IntoIter { fn drop(&mut self) { // destroy the remaining elements if self.cap != 0 { @@ -1462,6 +1499,84 @@ impl Drop for MoveItems { } } +/// An iterator that drains a vector. +#[unsafe_no_drop_flag] +pub struct Drain<'a, T> { + ptr: *const T, + end: *const T, + marker: ContravariantLifetime<'a>, +} + +impl<'a, T> Iterator for Drain<'a, T> { + #[inline] + fn next(&mut self) -> Option { + unsafe { + if self.ptr == self.end { + None + } else { + if mem::size_of::() == 0 { + // purposefully don't use 'ptr.offset' because for + // vectors with 0-size elements this would return the + // same pointer. 
+ self.ptr = mem::transmute(self.ptr as uint + 1); + + // Use a non-null pointer value + Some(ptr::read(mem::transmute(1u))) + } else { + let old = self.ptr; + self.ptr = self.ptr.offset(1); + + Some(ptr::read(old)) + } + } + } + } + + #[inline] + fn size_hint(&self) -> (uint, Option) { + let diff = (self.end as uint) - (self.ptr as uint); + let size = mem::size_of::(); + let exact = diff / (if size == 0 {1} else {size}); + (exact, Some(exact)) + } +} + +impl<'a, T> DoubleEndedIterator for Drain<'a, T> { + #[inline] + fn next_back(&mut self) -> Option { + unsafe { + if self.end == self.ptr { + None + } else { + if mem::size_of::() == 0 { + // See above for why 'ptr.offset' isn't used + self.end = mem::transmute(self.end as uint - 1); + + // Use a non-null pointer value + Some(ptr::read(mem::transmute(1u))) + } else { + self.end = self.end.offset(-1); + + Some(ptr::read(self.end)) + } + } + } + } +} + +impl<'a, T> ExactSizeIterator for Drain<'a, T> {} + +#[unsafe_destructor] +impl<'a, T> Drop for Drain<'a, T> { + fn drop(&mut self) { + // self.ptr == self.end == null if drop has already been called, + // so we can use #[unsafe_no_drop_flag]. + + // destroy the remaining elements + for _x in *self {} + } +} + /// Converts an iterator of pairs into a pair of vectors. /// /// Returns a tuple containing two vectors where the i-th element of the first vector contains the @@ -1806,12 +1921,10 @@ impl<'a> fmt::FormatWriter for Vec { #[cfg(test)] mod tests { - extern crate test; - - use std::prelude::*; - use std::mem::size_of; + use prelude::*; + use core::mem::size_of; use test::Bencher; - use super::{as_vec, unzip, raw, Vec}; + use super::{as_vec, unzip, raw}; struct DropCounter<'a> { count: &'a mut int @@ -2270,6 +2383,39 @@ mod tests { assert!(vec2 == vec![(), (), ()]); } + #[test] + fn test_drain_items() { + let mut vec = vec![1, 2, 3]; + let mut vec2: Vec = vec![]; + for i in vec.drain() { + vec2.push(i); + } + assert_eq!(vec, []); + assert_eq!(vec2, [ 1, 2, 3 ]); + } + + #[test] + fn test_drain_items_reverse() { + let mut vec = vec![1, 2, 3]; + let mut vec2: Vec = vec![]; + for i in vec.drain().rev() { + vec2.push(i); + } + assert_eq!(vec, []); + assert_eq!(vec2, [ 3, 2, 1 ]); + } + + #[test] + fn test_drain_items_zero_sized() { + let mut vec = vec![(), (), ()]; + let mut vec2: Vec<()> = vec![]; + for i in vec.drain() { + vec2.push(i); + } + assert_eq!(vec, []); + assert_eq!(vec2, [(), (), ()]); + } + #[test] fn test_into_boxed_slice() { let xs = vec![1u, 2, 3]; diff --git a/src/libcollections/vec_map.rs b/src/libcollections/vec_map.rs index 8faa9c1c522db..207e27ccdccc3 100644 --- a/src/libcollections/vec_map.rs +++ b/src/libcollections/vec_map.rs @@ -21,7 +21,6 @@ use core::hash::{Hash, Writer}; use core::iter; use core::iter::{Enumerate, FilterMap, Map}; use core::mem::replace; -use core::ops::FnOnce; use {vec, slice}; use vec::Vec; @@ -145,6 +144,7 @@ impl VecMap { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn keys<'r>(&'r self) -> Keys<'r, V> { fn first((a, _): (A, B)) -> A { a } + let first: fn((uint, &'r V)) -> uint = first; // coerce to fn pointer Keys { iter: self.iter().map(first) } } @@ -154,6 +154,7 @@ impl VecMap { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn values<'r>(&'r self) -> Values<'r, V> { fn second((_, b): (A, B)) -> B { b } + let second: fn((uint, &'r V)) -> &'r V = second; // coerce to fn pointer Values { iter: self.iter().map(second) } } @@ -177,8 +178,8 @@ impl VecMap { /// } 
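The `// coerce to fn pointer` lines above pin the generic helpers to the exact `fn` pointer types named in `Keys`, `Values`, and `IntoIter`. A rough, free-standing sketch of the same coercion pattern (the `double` helper and the vector are purely illustrative):

```rust
fn double(x: uint) -> uint { x * 2 }

fn main() {
    // Annotating the binding fixes the concrete fn-pointer type, mirroring
    // what the VecMap iterators need for their stored `fn(..) -> ..` field.
    let f: fn(uint) -> uint = double;
    let doubled: Vec<uint> = vec![1u, 2, 3].into_iter().map(f).collect();
    assert_eq!(doubled, vec![2, 4, 6]);
}
```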
/// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter<'r>(&'r self) -> Entries<'r, V> { - Entries { + pub fn iter<'r>(&'r self) -> Iter<'r, V> { + Iter { front: 0, back: self.v.len(), iter: self.v.iter() @@ -208,8 +209,8 @@ impl VecMap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter_mut<'r>(&'r mut self) -> MutEntries<'r, V> { - MutEntries { + pub fn iter_mut<'r>(&'r mut self) -> IterMut<'r, V> { + IterMut { front: 0, back: self.v.len(), iter: self.v.iter_mut() @@ -236,13 +237,14 @@ impl VecMap { /// assert_eq!(vec, vec![(1, "a"), (2, "b"), (3, "c")]); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(&mut self) -> MoveItems { + pub fn into_iter(&mut self) -> IntoIter { fn filter((i, v): (uint, Option)) -> Option<(uint, A)> { v.map(|v| (i, v)) } + let filter: fn((uint, Option)) -> Option<(uint, V)> = filter; // coerce to fn ptr let values = replace(&mut self.v, vec!()); - MoveItems { iter: values.into_iter().enumerate().filter_map(filter) } + IntoIter { iter: values.into_iter().enumerate().filter_map(filter) } } /// Return the number of elements in the map. @@ -606,42 +608,42 @@ macro_rules! double_ended_iterator { } /// An iterator over the key-value pairs of a map. -pub struct Entries<'a, V:'a> { +pub struct Iter<'a, V:'a> { front: uint, back: uint, - iter: slice::Items<'a, Option> + iter: slice::Iter<'a, Option> } -iterator! { impl Entries -> (uint, &'a V), as_ref } -double_ended_iterator! { impl Entries -> (uint, &'a V), as_ref } +iterator! { impl Iter -> (uint, &'a V), as_ref } +double_ended_iterator! { impl Iter -> (uint, &'a V), as_ref } /// An iterator over the key-value pairs of a map, with the /// values being mutable. -pub struct MutEntries<'a, V:'a> { +pub struct IterMut<'a, V:'a> { front: uint, back: uint, - iter: slice::MutItems<'a, Option> + iter: slice::IterMut<'a, Option> } -iterator! { impl MutEntries -> (uint, &'a mut V), as_mut } -double_ended_iterator! { impl MutEntries -> (uint, &'a mut V), as_mut } +iterator! { impl IterMut -> (uint, &'a mut V), as_mut } +double_ended_iterator! { impl IterMut -> (uint, &'a mut V), as_mut } /// An iterator over the keys of a map. pub struct Keys<'a, V: 'a> { - iter: Map<(uint, &'a V), uint, Entries<'a, V>, fn((uint, &'a V)) -> uint> + iter: Map<(uint, &'a V), uint, Iter<'a, V>, fn((uint, &'a V)) -> uint> } /// An iterator over the values of a map. pub struct Values<'a, V: 'a> { - iter: Map<(uint, &'a V), &'a V, Entries<'a, V>, fn((uint, &'a V)) -> &'a V> + iter: Map<(uint, &'a V), &'a V, Iter<'a, V>, fn((uint, &'a V)) -> &'a V> } /// A consuming iterator over the key-value pairs of a map. 
-pub struct MoveItems { +pub struct IntoIter { iter: FilterMap< (uint, Option), (uint, V), - Enumerate>>, + Enumerate>>, fn((uint, Option)) -> Option<(uint, V)>> } @@ -663,18 +665,17 @@ impl<'a, V> DoubleEndedIterator<&'a V> for Values<'a, V> { } -impl Iterator<(uint, V)> for MoveItems { +impl Iterator<(uint, V)> for IntoIter { fn next(&mut self) -> Option<(uint, V)> { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl DoubleEndedIterator<(uint, V)> for MoveItems { +impl DoubleEndedIterator<(uint, V)> for IntoIter { fn next_back(&mut self) -> Option<(uint, V)> { self.iter.next_back() } } #[cfg(test)] mod test_map { - use std::prelude::*; - use vec::Vec; + use prelude::*; use core::hash::hash; use super::VecMap; @@ -1047,8 +1048,7 @@ mod test_map { #[cfg(test)] mod bench { - extern crate test; - use self::test::Bencher; + use test::Bencher; use super::VecMap; use bench::{insert_rand_n, insert_seq_n, find_rand_n, find_seq_n}; diff --git a/src/libcore/array.rs b/src/libcore/array.rs index ffaf35414ea0c..e85a132ed363f 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -25,7 +25,7 @@ use option::Option; macro_rules! array_impls { ($($N:expr)+) => { $( - #[unstable = "waiting for Clone to stabilize"] + #[stable] impl Clone for [T, ..$N] { fn clone(&self) -> [T, ..$N] { *self @@ -115,4 +115,3 @@ array_impls! { 20 21 22 23 24 25 26 27 28 29 30 31 32 } - diff --git a/src/libcore/borrow.rs b/src/libcore/borrow.rs index b44b87bd93807..9bbcf67773ebb 100644 --- a/src/libcore/borrow.rs +++ b/src/libcore/borrow.rs @@ -137,6 +137,7 @@ pub enum Cow<'a, T, Sized? B: 'a> where B: ToOwned { Owned(T) } +#[stable] impl<'a, T, Sized? B> Clone for Cow<'a, T, B> where B: ToOwned { fn clone(&self) -> Cow<'a, T, B> { match *self { diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index 01979e975774c..b45424a5eed3f 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -164,7 +164,7 @@ use option::Option; use option::Option::{None, Some}; /// A mutable memory location that admits only `Copy` data. -#[unstable = "likely to be renamed; otherwise stable"] +#[stable] pub struct Cell { value: UnsafeCell, noshare: marker::NoSync, @@ -208,7 +208,7 @@ impl Cell { } } -#[unstable = "waiting for `Clone` trait to become stable"] +#[stable] impl Clone for Cell { fn clone(&self) -> Cell { Cell::new(self.get()) @@ -231,7 +231,7 @@ impl PartialEq for Cell { } /// A mutable memory location with dynamically checked borrow rules -#[unstable = "likely to be renamed; otherwise stable"] +#[stable] pub struct RefCell { value: UnsafeCell, borrow: Cell, @@ -256,7 +256,7 @@ impl RefCell { } /// Consumes the `RefCell`, returning the wrapped value. - #[unstable = "recently renamed per RFC 430"] + #[stable] pub fn into_inner(self) -> T { // Since this function takes `self` (the `RefCell`) by value, the // compiler statically verifies that it is not currently borrowed. @@ -275,7 +275,7 @@ impl RefCell { /// immutable borrows can be taken out at the same time. /// /// Returns `None` if the value is currently mutably borrowed. - #[unstable = "may be renamed, depending on global conventions"] + #[unstable = "may be renamed or removed"] pub fn try_borrow<'a>(&'a self) -> Option> { match BorrowRef::new(&self.borrow) { Some(b) => Some(Ref { _value: unsafe { &*self.value.get() }, _borrow: b }), @@ -291,7 +291,7 @@ impl RefCell { /// # Panics /// /// Panics if the value is currently mutably borrowed. 
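For the `try_borrow` path above (the non-panicking counterpart of `borrow`), a small sketch of the dynamic borrow check, using the API exactly as declared in this hunk:

```rust
use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(5i);
    {
        let _guard = cell.borrow_mut();
        // While a mutable borrow is live, a shared borrow is refused.
        assert!(cell.try_borrow().is_none());
    }
    // Once the guard goes out of scope, borrowing succeeds again.
    assert_eq!(*cell.borrow(), 5);
}
```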
- #[unstable] + #[stable] pub fn borrow<'a>(&'a self) -> Ref<'a, T> { match self.try_borrow() { Some(ptr) => ptr, @@ -305,7 +305,7 @@ impl RefCell { /// cannot be borrowed while this borrow is active. /// /// Returns `None` if the value is currently borrowed. - #[unstable = "may be renamed, depending on global conventions"] + #[unstable = "may be renamed or removed"] pub fn try_borrow_mut<'a>(&'a self) -> Option> { match BorrowRefMut::new(&self.borrow) { Some(b) => Some(RefMut { _value: unsafe { &mut *self.value.get() }, _borrow: b }), @@ -321,7 +321,7 @@ impl RefCell { /// # Panics /// /// Panics if the value is currently borrowed. - #[unstable] + #[stable] pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> { match self.try_borrow_mut() { Some(ptr) => ptr, @@ -341,7 +341,7 @@ impl RefCell { } } -#[unstable = "waiting for `Clone` to become stable"] +#[stable] impl Clone for RefCell { fn clone(&self) -> RefCell { RefCell::new(self.borrow().clone()) @@ -400,7 +400,7 @@ impl<'b> Clone for BorrowRef<'b> { } /// Wraps a borrowed reference to a value in a `RefCell` box. -#[unstable] +#[stable] pub struct Ref<'b, T:'b> { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref @@ -456,7 +456,7 @@ impl<'b> BorrowRefMut<'b> { } /// Wraps a mutable borrowed reference to a value in a `RefCell` box. -#[unstable] +#[stable] pub struct RefMut<'b, T:'b> { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref @@ -517,7 +517,7 @@ impl<'b, T> DerefMut for RefMut<'b, T> { /// is not recommended to access its fields directly, `get` should be used /// instead. #[lang="unsafe"] -#[unstable = "this type may be renamed in the future"] +#[stable] pub struct UnsafeCell { /// Wrapped value /// @@ -539,22 +539,16 @@ impl UnsafeCell { } /// Gets a mutable pointer to the wrapped value. - /// - /// This function is unsafe as the pointer returned is an unsafe pointer and - /// no guarantees are made about the aliasing of the pointers being handed - /// out in this or other tasks. #[inline] - #[unstable = "conventions around acquiring an inner reference are still \ - under development"] - pub unsafe fn get(&self) -> *mut T { &self.value as *const T as *mut T } + #[stable] + pub fn get(&self) -> *mut T { &self.value as *const T as *mut T } /// Unwraps the value /// /// This function is unsafe because there is no guarantee that this or other /// tasks are currently inspecting the inner value. #[inline] - #[unstable = "conventions around the name `unwrap` are still under \ - development"] + #[stable] pub unsafe fn into_inner(self) -> T { self.value } /// Deprecated, use into_inner() instead diff --git a/src/libcore/clone.rs b/src/libcore/clone.rs index f6be422813ac5..686ccf6f1a251 100644 --- a/src/libcore/clone.rs +++ b/src/libcore/clone.rs @@ -19,13 +19,15 @@ //! explicitly, by convention implementing the `Clone` trait and calling //! the `clone` method. -#![unstable] +#![stable] use kinds::Sized; /// A common trait for cloning an object. +#[stable] pub trait Clone { /// Returns a copy of the value. + #[stable] fn clone(&self) -> Self; /// Perform copy-assignment from `source`. @@ -34,12 +36,13 @@ pub trait Clone { /// but can be overridden to reuse the resources of `a` to avoid unnecessary /// allocations. 
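With `UnsafeCell::get` made safe above, only the dereference of the returned raw pointer still needs an `unsafe` block. A toy wrapper sketch, assuming nothing beyond the `UnsafeCell` API shown in this hunk (type and method names are illustrative):

```rust
use std::cell::UnsafeCell;

struct MyCell<T> { inner: UnsafeCell<T> }

impl<T: Copy> MyCell<T> {
    fn new(v: T) -> MyCell<T> { MyCell { inner: UnsafeCell::new(v) } }
    // Calling `get()` is safe after this patch; reading or writing through
    // the returned `*mut T` is still the caller's obligation.
    fn get(&self) -> T { unsafe { *self.inner.get() } }
    fn set(&self, v: T) { unsafe { *self.inner.get() = v; } }
}

fn main() {
    let c = MyCell::new(1u);
    c.set(5);
    assert_eq!(c.get(), 5);
}
```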
#[inline(always)] - #[experimental = "this function is mostly unused"] + #[unstable = "this function is rarely used"] fn clone_from(&mut self, source: &Self) { *self = source.clone() } } +#[stable] impl<'a, Sized? T> Clone for &'a T { /// Return a shallow copy of the reference. #[inline] @@ -48,6 +51,7 @@ impl<'a, Sized? T> Clone for &'a T { macro_rules! clone_impl { ($t:ty) => { + #[stable] impl Clone for $t { /// Return a deep copy of the value. #[inline] @@ -95,4 +99,3 @@ extern_fn_clone! { A, B, C, D, E } extern_fn_clone! { A, B, C, D, E, F } extern_fn_clone! { A, B, C, D, E, F, G } extern_fn_clone! { A, B, C, D, E, F, G, H } - diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index 6e793be67e250..ca523db214b0c 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -41,7 +41,7 @@ #![stable] -pub use self::Ordering::*; +use self::Ordering::*; use kinds::Sized; use option::Option::{mod, Some, None}; diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs index d849bfa24c137..9ab450efd2272 100644 --- a/src/libcore/fmt/float.rs +++ b/src/libcore/fmt/float.rs @@ -23,7 +23,7 @@ use num::cast; use ops::FnOnce; use result::Result::Ok; use slice::{mod, SliceExt}; -use str::StrPrelude; +use str::StrExt; /// A flag that specifies whether to use exponential (scientific) notation. pub enum ExponentFormat { diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 79fb11f385433..fb26450ec758e 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -24,7 +24,7 @@ use result::Result::{Ok, Err}; use result; use slice::SliceExt; use slice; -use str::StrPrelude; +use str::{StrExt, Utf8Error}; pub use self::num::radix; pub use self::num::Radix; @@ -89,7 +89,7 @@ pub struct Formatter<'a> { precision: Option, buf: &'a mut (FormatWriter+'a), - curarg: slice::Items<'a, Argument<'a>>, + curarg: slice::Iter<'a, Argument<'a>>, args: &'a [Argument<'a>], } @@ -795,5 +795,18 @@ impl<'b, T: Show> Show for RefMut<'b, T> { } } +impl Show for Utf8Error { + fn fmt(&self, f: &mut Formatter) -> Result { + match *self { + Utf8Error::InvalidByte(n) => { + write!(f, "invalid utf-8: invalid byte at index {}", n) + } + Utf8Error::TooShort => { + write!(f, "invalid utf-8: byte slice too short") + } + } + } +} + // If you expected tests to be here, look instead at the run-pass/ifmt.rs test, // it's a lot easier than creating all of the rt::Piece structures here. diff --git a/src/libcore/hash/sip.rs b/src/libcore/hash/sip.rs index e10f5a9fed188..ab6b0986c686d 100644 --- a/src/libcore/hash/sip.rs +++ b/src/libcore/hash/sip.rs @@ -195,6 +195,7 @@ impl Writer for SipState { } } +#[stable] impl Clone for SipState { #[inline] fn clone(&self) -> SipState { @@ -271,14 +272,9 @@ pub fn hash_with_keys>(k0: u64, k1: u64, value: &T) -> #[cfg(test)] mod tests { use test::Bencher; - use std::prelude::*; + use prelude::*; use std::fmt; - use str::Str; - use string::String; - use slice::{AsSlice, SliceExt}; - use vec::Vec; - use super::super::{Hash, Writer}; use super::{SipState, hash, hash_with_keys}; diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index d8f103fa0f3dc..950c47c636987 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -222,7 +222,7 @@ extern "rust-intrinsic" { /// Both types must have the same size and alignment, and this guarantee /// is enforced at compile-time.
/// - /// # Example + /// # Examples /// /// ```rust /// use std::mem; @@ -253,14 +253,20 @@ extern "rust-intrinsic" { /// integer, since the conversion would throw away aliasing information. pub fn offset(dst: *const T, offset: int) -> *const T; - /// Copies data from one location to another. - /// - /// Copies `count` elements (not bytes) from `src` to `dst`. The source + /// Copies `count * size_of` bytes from `src` to `dst`. The source /// and destination may *not* overlap. /// /// `copy_nonoverlapping_memory` is semantically equivalent to C's `memcpy`. /// - /// # Example + /// # Safety + /// + /// Beyond requiring that both regions of memory be allocated, it is Undefined Behaviour + /// for source and destination to overlap. Care must also be taken with the ownership of + /// `src` and `dst`. This method semantically moves the values of `src` into `dst`. + /// However it does not drop the contents of `dst`, or prevent the contents of `src` + /// from being dropped or used. + /// + /// # Examples /// /// A safe swap function: /// @@ -284,22 +290,22 @@ extern "rust-intrinsic" { /// } /// } /// ``` - /// - /// # Safety Note - /// - /// If the source and destination overlap then the behavior of this - /// function is undefined. #[unstable] pub fn copy_nonoverlapping_memory(dst: *mut T, src: *const T, count: uint); - /// Copies data from one location to another. - /// - /// Copies `count` elements (not bytes) from `src` to `dst`. The source + /// Copies `count * size_of` bytes from `src` to `dst`. The source /// and destination may overlap. /// /// `copy_memory` is semantically equivalent to C's `memmove`. /// - /// # Example + /// # Safety + /// + /// Care must be taken with the ownership of `src` and `dst`. + /// This method semantically moves the values of `src` into `dst`. + /// However it does not drop the contents of `dst`, or prevent the contents of `src` + /// from being dropped or used. 
+ /// + /// # Examples /// /// Efficiently create a Rust vector from an unsafe buffer: /// diff --git a/src/libcore/iter.rs b/src/libcore/iter.rs index 1f83aad9c7cdc..1cd4d7b89d6d6 100644 --- a/src/libcore/iter.rs +++ b/src/libcore/iter.rs @@ -1386,6 +1386,7 @@ pub struct Map, F: FnMut(A) -> B> { } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for Map where I: Clone + Iterator, F: Clone + FnMut(A) -> B, @@ -1460,6 +1461,7 @@ pub struct Filter where I: Iterator, P: FnMut(&A) -> bool { } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for Filter where I: Clone + Iterator, P: Clone + FnMut(&A) -> bool, @@ -1518,6 +1520,7 @@ pub struct FilterMap where I: Iterator, F: FnMut(A) -> Option } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for FilterMap where I: Clone + Iterator, F: Clone + FnMut(A) -> Option, @@ -1693,6 +1696,7 @@ pub struct SkipWhile where I: Iterator, P: FnMut(&A) -> bool { } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for SkipWhile where I: Clone + Iterator, P: Clone + FnMut(&A) -> bool, @@ -1736,6 +1740,7 @@ pub struct TakeWhile where I: Iterator, P: FnMut(&A) -> bool { } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for TakeWhile where I: Clone + Iterator, P: Clone + FnMut(&A) -> bool, @@ -1911,6 +1916,7 @@ pub struct Scan where I: Iterator, F: FnMut(&mut St, A) -> Op } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for Scan where I: Clone + Iterator, St: Clone, @@ -1955,6 +1961,7 @@ pub struct FlatMap where I: Iterator, U: Iterator, F: FnMut } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for FlatMap where I: Clone + Iterator, U: Clone + Iterator, @@ -2115,6 +2122,7 @@ pub struct Inspect where I: Iterator, F: FnMut(&A) { } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for Inspect where I: Clone + Iterator, F: Clone + FnMut(&A), @@ -2222,6 +2230,7 @@ pub struct Unfold where F: FnMut(&mut St) -> Option { } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl Clone for Unfold where F: Clone + FnMut(&mut St) -> Option, St: Clone, @@ -2603,6 +2612,9 @@ pub fn iterate(seed: T, f: F) -> Iterate where val.clone() } + // coerce to a fn pointer + let next: fn(&mut IterateState) -> Option = next; + Unfold::new((f, Some(seed), true), next) } diff --git a/src/libcore/kinds.rs b/src/libcore/kinds.rs index 93fd3f1b9f16f..b0f46e3d68c90 100644 --- a/src/libcore/kinds.rs +++ b/src/libcore/kinds.rs @@ -91,7 +91,8 @@ pub trait Sync for Sized? { /// implemented using unsafe code. In that case, you may want to embed /// some of the marker types below into your type. pub mod marker { - use super::Copy; + use super::{Copy,Sized}; + use clone::Clone; /// A marker type whose type parameter `T` is considered to be /// covariant with respect to the type itself. This is (typically) @@ -131,10 +132,13 @@ pub mod marker { /// (for example, `S<&'static int>` is a subtype of `S<&'a int>` /// for some lifetime `'a`, but not the other way around). 
#[lang="covariant_type"] - #[deriving(Clone, PartialEq, Eq, PartialOrd, Ord)] - pub struct CovariantType; + #[deriving(PartialEq, Eq, PartialOrd, Ord)] + pub struct CovariantType; - impl Copy for CovariantType {} + impl Copy for CovariantType {} + impl Clone for CovariantType { + fn clone(&self) -> CovariantType { *self } + } /// A marker type whose type parameter `T` is considered to be /// contravariant with respect to the type itself. This is (typically) @@ -176,10 +180,13 @@ pub mod marker { /// function requires arguments of type `T`, it must also accept /// arguments of type `U`, hence such a conversion is safe. #[lang="contravariant_type"] - #[deriving(Clone, PartialEq, Eq, PartialOrd, Ord)] - pub struct ContravariantType; + #[deriving(PartialEq, Eq, PartialOrd, Ord)] + pub struct ContravariantType; - impl Copy for ContravariantType {} + impl Copy for ContravariantType {} + impl Clone for ContravariantType { + fn clone(&self) -> ContravariantType { *self } + } /// A marker type whose type parameter `T` is considered to be /// invariant with respect to the type itself. This is (typically) @@ -203,10 +210,13 @@ pub mod marker { /// never written, but in fact `Cell` uses unsafe code to achieve /// interior mutability. #[lang="invariant_type"] - #[deriving(Clone, PartialEq, Eq, PartialOrd, Ord)] - pub struct InvariantType; + #[deriving(PartialEq, Eq, PartialOrd, Ord)] + pub struct InvariantType; - impl Copy for InvariantType {} + impl Copy for InvariantType {} + impl Clone for InvariantType { + fn clone(&self) -> InvariantType { *self } + } /// As `CovariantType`, but for lifetime parameters. Using /// `CovariantLifetime<'a>` indicates that it is ok to substitute @@ -252,11 +262,9 @@ pub mod marker { /// and this pointer is itself stored in an inherently mutable /// location (such as a `Cell`). #[lang="invariant_lifetime"] - #[deriving(Clone, PartialEq, Eq, PartialOrd, Ord)] + #[deriving(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub struct InvariantLifetime<'a>; - impl<'a> Copy for InvariantLifetime<'a> {} - /// A type which is considered "not sendable", meaning that it cannot /// be safely sent between tasks, even if it is owned. This is /// typically embedded in other types, such as `Gc`, to ensure that diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index 729cb69193e63..9b6622a7127d5 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -107,7 +107,6 @@ pub mod default; pub mod any; pub mod atomic; -pub mod bool; pub mod borrow; pub mod cell; pub mod char; @@ -120,15 +119,11 @@ pub mod result; pub mod simd; pub mod slice; pub mod str; -pub mod tuple; pub mod hash; -// FIXME #15320: primitive documentation needs top-level modules, this -// should be `core::tuple::unit`. -#[path = "tuple/unit.rs"] -pub mod unit; pub mod fmt; // note: does not need to be public +mod tuple; mod array; #[doc(hidden)] diff --git a/src/libcore/num/int.rs b/src/libcore/num/int.rs index a0659d3830764..91c5e4163f9e5 100644 --- a/src/libcore/num/int.rs +++ b/src/libcore/num/int.rs @@ -10,9 +10,8 @@ //! Operations and constants for architecture-sized signed integers (`int` type) -#![unstable] +#![stable] #![doc(primitive = "int")] #[cfg(target_word_size = "32")] int_module! { int, 32 } #[cfg(target_word_size = "64")] int_module! 
{ int, 64 } - diff --git a/src/libcore/num/int_macros.rs b/src/libcore/num/int_macros.rs index 00b9d88abe194..522eab9180c86 100644 --- a/src/libcore/num/int_macros.rs +++ b/src/libcore/num/int_macros.rs @@ -24,13 +24,12 @@ pub const BYTES : uint = ($bits / 8); // FIXME(#11621): Should be deprecated once CTFE is implemented in favour of // calling the `Bounded::min_value` function. -#[unstable] +#[stable] pub const MIN: $T = (-1 as $T) << (BITS - 1); // FIXME(#9837): Compute MIN like this so the high bits that shouldn't exist are 0. // FIXME(#11621): Should be deprecated once CTFE is implemented in favour of // calling the `Bounded::max_value` function. -#[unstable] +#[stable] pub const MAX: $T = !MIN; ) } - diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 84d1d8e459a8c..60735879213d8 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -32,7 +32,7 @@ use ops::{Add, Sub, Mul, Div, Rem, Neg}; use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr}; use option::Option; use option::Option::{Some, None}; -use str::{FromStr, from_str, StrPrelude}; +use str::{FromStr, from_str, StrExt}; /// Simultaneous division and remainder #[inline] diff --git a/src/libcore/num/uint.rs b/src/libcore/num/uint.rs index 80d7b0b4ef374..35739f68da969 100644 --- a/src/libcore/num/uint.rs +++ b/src/libcore/num/uint.rs @@ -10,8 +10,7 @@ //! Operations and constants for architecture-sized unsigned integers (`uint` type) -#![unstable] +#![stable] #![doc(primitive = "uint")] uint_module! { uint, int, ::int::BITS } - diff --git a/src/libcore/num/uint_macros.rs b/src/libcore/num/uint_macros.rs index d79cf20fdfa6f..82eca0d465982 100644 --- a/src/libcore/num/uint_macros.rs +++ b/src/libcore/num/uint_macros.rs @@ -18,10 +18,9 @@ pub const BITS : uint = $bits; #[unstable] pub const BYTES : uint = ($bits / 8); -#[unstable] +#[stable] pub const MIN: $T = 0 as $T; -#[unstable] +#[stable] pub const MAX: $T = 0 as $T - 1 as $T; ) } - diff --git a/src/libcore/ops.rs b/src/libcore/ops.rs index 6e4beb2356e8f..e752fd11ee5b2 100644 --- a/src/libcore/ops.rs +++ b/src/libcore/ops.rs @@ -92,46 +92,6 @@ pub trait Drop { /// struct Foo; /// /// impl Add for Foo { -/// fn add(&self, _rhs: &Foo) -> Foo { -/// println!("Adding!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo + Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="add"] -pub trait Add for Sized? { - /// The method for the `+` operator - fn add(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! add_impl { - ($($t:ty)*) => ($( - impl Add<$t, $t> for $t { - #[inline] - fn add(&self, other: &$t) -> $t { (*self) + (*other) } - } - )*) -} - -/// The `Add` trait is used to specify the functionality of `+`. -/// -/// # Example -/// -/// A trivial implementation of `Add`. When `Foo + Foo` happens, it ends up -/// calling `add`, and therefore, `main` prints `Adding!`. -/// -/// ```rust -/// struct Foo; -/// -/// impl Add for Foo { /// fn add(self, _rhs: Foo) -> Foo { /// println!("Adding!"); /// self @@ -142,14 +102,12 @@ macro_rules! add_impl { /// Foo + Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="add"] pub trait Add { /// The method for the `+` operator fn add(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! add_impl { ($($t:ty)*) => ($( impl Add<$t, $t> for $t { @@ -173,46 +131,6 @@ add_impl! 
{ uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 } /// struct Foo; /// /// impl Sub for Foo { -/// fn sub(&self, _rhs: &Foo) -> Foo { -/// println!("Subtracting!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo - Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="sub"] -pub trait Sub for Sized? { - /// The method for the `-` operator - fn sub(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! sub_impl { - ($($t:ty)*) => ($( - impl Sub<$t, $t> for $t { - #[inline] - fn sub(&self, other: &$t) -> $t { (*self) - (*other) } - } - )*) -} - -/// The `Sub` trait is used to specify the functionality of `-`. -/// -/// # Example -/// -/// A trivial implementation of `Sub`. When `Foo - Foo` happens, it ends up -/// calling `sub`, and therefore, `main` prints `Subtracting!`. -/// -/// ```rust -/// struct Foo; -/// -/// impl Sub for Foo { /// fn sub(self, _rhs: Foo) -> Foo { /// println!("Subtracting!"); /// self @@ -223,14 +141,12 @@ macro_rules! sub_impl { /// Foo - Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="sub"] pub trait Sub { /// The method for the `-` operator fn sub(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! sub_impl { ($($t:ty)*) => ($( impl Sub<$t, $t> for $t { @@ -254,46 +170,6 @@ sub_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 } /// struct Foo; /// /// impl Mul for Foo { -/// fn mul(&self, _rhs: &Foo) -> Foo { -/// println!("Multiplying!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo * Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="mul"] -pub trait Mul for Sized? { - /// The method for the `*` operator - fn mul(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! mul_impl { - ($($t:ty)*) => ($( - impl Mul<$t, $t> for $t { - #[inline] - fn mul(&self, other: &$t) -> $t { (*self) * (*other) } - } - )*) -} - -/// The `Mul` trait is used to specify the functionality of `*`. -/// -/// # Example -/// -/// A trivial implementation of `Mul`. When `Foo * Foo` happens, it ends up -/// calling `mul`, and therefore, `main` prints `Multiplying!`. -/// -/// ```rust -/// struct Foo; -/// -/// impl Mul for Foo { /// fn mul(self, _rhs: Foo) -> Foo { /// println!("Multiplying!"); /// self @@ -304,14 +180,12 @@ macro_rules! mul_impl { /// Foo * Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="mul"] pub trait Mul { /// The method for the `*` operator fn mul(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! mul_impl { ($($t:ty)*) => ($( impl Mul<$t, $t> for $t { @@ -335,46 +209,6 @@ mul_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 } /// struct Foo; /// /// impl Div for Foo { -/// fn div(&self, _rhs: &Foo) -> Foo { -/// println!("Dividing!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo / Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="div"] -pub trait Div for Sized? { - /// The method for the `/` operator - fn div(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! 
div_impl { - ($($t:ty)*) => ($( - impl Div<$t, $t> for $t { - #[inline] - fn div(&self, other: &$t) -> $t { (*self) / (*other) } - } - )*) -} - -/// The `Div` trait is used to specify the functionality of `/`. -/// -/// # Example -/// -/// A trivial implementation of `Div`. When `Foo / Foo` happens, it ends up -/// calling `div`, and therefore, `main` prints `Dividing!`. -/// -/// ``` -/// struct Foo; -/// -/// impl Div for Foo { /// fn div(self, _rhs: Foo) -> Foo { /// println!("Dividing!"); /// self @@ -385,14 +219,12 @@ macro_rules! div_impl { /// Foo / Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="div"] pub trait Div { /// The method for the `/` operator fn div(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! div_impl { ($($t:ty)*) => ($( impl Div<$t, $t> for $t { @@ -416,60 +248,6 @@ div_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64 } /// struct Foo; /// /// impl Rem for Foo { -/// fn rem(&self, _rhs: &Foo) -> Foo { -/// println!("Remainder-ing!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo % Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="rem"] -pub trait Rem for Sized? { - /// The method for the `%` operator - fn rem(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! rem_impl { - ($($t:ty)*) => ($( - impl Rem<$t, $t> for $t { - #[inline] - fn rem(&self, other: &$t) -> $t { (*self) % (*other) } - } - )*) -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! rem_float_impl { - ($t:ty, $fmod:ident) => { - impl Rem<$t, $t> for $t { - #[inline] - fn rem(&self, other: &$t) -> $t { - extern { fn $fmod(a: $t, b: $t) -> $t; } - unsafe { $fmod(*self, *other) } - } - } - } -} - -/// The `Rem` trait is used to specify the functionality of `%`. -/// -/// # Example -/// -/// A trivial implementation of `Rem`. When `Foo % Foo` happens, it ends up -/// calling `rem`, and therefore, `main` prints `Remainder-ing!`. -/// -/// ``` -/// struct Foo; -/// -/// impl Rem for Foo { /// fn rem(self, _rhs: Foo) -> Foo { /// println!("Remainder-ing!"); /// self @@ -480,14 +258,12 @@ macro_rules! rem_float_impl { /// Foo % Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="rem"] pub trait Rem { /// The method for the `%` operator fn rem(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! rem_impl { ($($t:ty)*) => ($( impl Rem<$t, $t> for $t { @@ -497,7 +273,6 @@ macro_rules! rem_impl { )*) } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! rem_float_impl { ($t:ty, $fmod:ident) => { impl Rem<$t, $t> for $t { @@ -721,46 +496,6 @@ not_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 } /// struct Foo; /// /// impl BitAnd for Foo { -/// fn bitand(&self, _rhs: &Foo) -> Foo { -/// println!("Bitwise And-ing!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo & Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="bitand"] -pub trait BitAnd for Sized? { - /// The method for the `&` operator - fn bitand(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! 
bitand_impl { - ($($t:ty)*) => ($( - impl BitAnd<$t, $t> for $t { - #[inline] - fn bitand(&self, rhs: &$t) -> $t { (*self) & (*rhs) } - } - )*) -} - -/// The `BitAnd` trait is used to specify the functionality of `&`. -/// -/// # Example -/// -/// A trivial implementation of `BitAnd`. When `Foo & Foo` happens, it ends up -/// calling `bitand`, and therefore, `main` prints `Bitwise And-ing!`. -/// -/// ``` -/// struct Foo; -/// -/// impl BitAnd for Foo { /// fn bitand(self, _rhs: Foo) -> Foo { /// println!("Bitwise And-ing!"); /// self @@ -771,14 +506,12 @@ macro_rules! bitand_impl { /// Foo & Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="bitand"] pub trait BitAnd { /// The method for the `&` operator fn bitand(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! bitand_impl { ($($t:ty)*) => ($( impl BitAnd<$t, $t> for $t { @@ -802,46 +535,6 @@ bitand_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 } /// struct Foo; /// /// impl BitOr for Foo { -/// fn bitor(&self, _rhs: &Foo) -> Foo { -/// println!("Bitwise Or-ing!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo | Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="bitor"] -pub trait BitOr for Sized? { - /// The method for the `|` operator - fn bitor(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! bitor_impl { - ($($t:ty)*) => ($( - impl BitOr<$t,$t> for $t { - #[inline] - fn bitor(&self, rhs: &$t) -> $t { (*self) | (*rhs) } - } - )*) -} - -/// The `BitOr` trait is used to specify the functionality of `|`. -/// -/// # Example -/// -/// A trivial implementation of `BitOr`. When `Foo | Foo` happens, it ends up -/// calling `bitor`, and therefore, `main` prints `Bitwise Or-ing!`. -/// -/// ``` -/// struct Foo; -/// -/// impl BitOr for Foo { /// fn bitor(self, _rhs: Foo) -> Foo { /// println!("Bitwise Or-ing!"); /// self @@ -852,14 +545,12 @@ macro_rules! bitor_impl { /// Foo | Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="bitor"] pub trait BitOr { /// The method for the `|` operator fn bitor(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! bitor_impl { ($($t:ty)*) => ($( impl BitOr<$t,$t> for $t { @@ -883,46 +574,6 @@ bitor_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 } /// struct Foo; /// /// impl BitXor for Foo { -/// fn bitxor(&self, _rhs: &Foo) -> Foo { -/// println!("Bitwise Xor-ing!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo ^ Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="bitxor"] -pub trait BitXor for Sized? { - /// The method for the `^` operator - fn bitxor(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! bitxor_impl { - ($($t:ty)*) => ($( - impl BitXor<$t, $t> for $t { - #[inline] - fn bitxor(&self, other: &$t) -> $t { (*self) ^ (*other) } - } - )*) -} - -/// The `BitXor` trait is used to specify the functionality of `^`. -/// -/// # Example -/// -/// A trivial implementation of `BitXor`. When `Foo ^ Foo` happens, it ends up -/// calling `bitxor`, and therefore, `main` prints `Bitwise Xor-ing!`. -/// -/// ``` -/// struct Foo; -/// -/// impl BitXor for Foo { /// fn bitxor(self, _rhs: Foo) -> Foo { /// println!("Bitwise Xor-ing!"); /// self @@ -933,14 +584,12 @@ macro_rules! 
bitxor_impl { /// Foo ^ Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="bitxor"] pub trait BitXor { /// The method for the `^` operator fn bitxor(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! bitxor_impl { ($($t:ty)*) => ($( impl BitXor<$t, $t> for $t { @@ -964,48 +613,6 @@ bitxor_impl! { bool uint u8 u16 u32 u64 int i8 i16 i32 i64 } /// struct Foo; /// /// impl Shl for Foo { -/// fn shl(&self, _rhs: &Foo) -> Foo { -/// println!("Shifting left!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo << Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="shl"] -pub trait Shl for Sized? { - /// The method for the `<<` operator - fn shl(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! shl_impl { - ($($t:ty)*) => ($( - impl Shl for $t { - #[inline] - fn shl(&self, other: &uint) -> $t { - (*self) << (*other) - } - } - )*) -} - -/// The `Shl` trait is used to specify the functionality of `<<`. -/// -/// # Example -/// -/// A trivial implementation of `Shl`. When `Foo << Foo` happens, it ends up -/// calling `shl`, and therefore, `main` prints `Shifting left!`. -/// -/// ``` -/// struct Foo; -/// -/// impl Shl for Foo { /// fn shl(self, _rhs: Foo) -> Foo { /// println!("Shifting left!"); /// self @@ -1016,14 +623,12 @@ macro_rules! shl_impl { /// Foo << Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="shl"] pub trait Shl { /// The method for the `<<` operator fn shl(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! shl_impl { ($($t:ty)*) => ($( impl Shl for $t { @@ -1049,46 +654,6 @@ shl_impl! { uint u8 u16 u32 u64 int i8 i16 i32 i64 } /// struct Foo; /// /// impl Shr for Foo { -/// fn shr(&self, _rhs: &Foo) -> Foo { -/// println!("Shifting right!"); -/// *self -/// } -/// } -/// -/// fn main() { -/// Foo >> Foo; -/// } -/// ``` -// NOTE(stage0): Remove trait after a snapshot -#[cfg(stage0)] -#[lang="shr"] -pub trait Shr for Sized? { - /// The method for the `>>` operator - fn shr(&self, rhs: &RHS) -> Result; -} - -// NOTE(stage0): Remove macro after a snapshot -#[cfg(stage0)] -macro_rules! shr_impl { - ($($t:ty)*) => ($( - impl Shr for $t { - #[inline] - fn shr(&self, other: &uint) -> $t { (*self) >> (*other) } - } - )*) -} - -/// The `Shr` trait is used to specify the functionality of `>>`. -/// -/// # Example -/// -/// A trivial implementation of `Shr`. When `Foo >> Foo` happens, it ends up -/// calling `shr`, and therefore, `main` prints `Shifting right!`. -/// -/// ``` -/// struct Foo; -/// -/// impl Shr for Foo { /// fn shr(self, _rhs: Foo) -> Foo { /// println!("Shifting right!"); /// self @@ -1099,14 +664,12 @@ macro_rules! shr_impl { /// Foo >> Foo; /// } /// ``` -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot #[lang="shr"] pub trait Shr { /// The method for the `>>` operator fn shr(self, rhs: RHS) -> Result; } -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot macro_rules! shr_impl { ($($t:ty)*) => ($( impl Shr for $t { diff --git a/src/libcore/option.rs b/src/libcore/option.rs index 314b47fc6476b..d831a57893bd7 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -718,15 +718,15 @@ impl Option { /// # Example /// /// Convert a string to an integer, turning poorly-formed strings - /// into 0 (the default value for integers). 
`from_str` converts + /// into 0 (the default value for integers). `parse` converts /// a string to any other type that implements `FromStr`, returning /// `None` on error. /// /// ``` /// let good_year_from_input = "1909"; /// let bad_year_from_input = "190blarg"; - /// let good_year = from_str(good_year_from_input).unwrap_or_default(); - /// let bad_year = from_str(bad_year_from_input).unwrap_or_default(); + /// let good_year = good_year_from_input.parse().unwrap_or_default(); + /// let bad_year = bad_year_from_input.parse().unwrap_or_default(); /// /// assert_eq!(1909i, good_year); /// assert_eq!(0i, bad_year); @@ -819,6 +819,7 @@ impl<'a, A> DoubleEndedIterator<&'a A> for Iter<'a, A> { impl<'a, A> ExactSizeIterator<&'a A> for Iter<'a, A> {} +#[stable] impl<'a, A> Clone for Iter<'a, A> { fn clone(&self) -> Iter<'a, A> { Iter { inner: self.inner.clone() } diff --git a/src/libcore/prelude.rs b/src/libcore/prelude.rs index ff3fc870beb83..f6abc8da79c0c 100644 --- a/src/libcore/prelude.rs +++ b/src/libcore/prelude.rs @@ -60,7 +60,7 @@ pub use option::Option::{Some, None}; pub use ptr::RawPtr; pub use result::Result; pub use result::Result::{Ok, Err}; -pub use str::{Str, StrPrelude}; +pub use str::{Str, StrExt}; pub use tuple::{Tuple1, Tuple2, Tuple3, Tuple4}; pub use tuple::{Tuple5, Tuple6, Tuple7, Tuple8}; pub use tuple::{Tuple9, Tuple10, Tuple11, Tuple12}; diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 36c6b9572ea70..b226d4a6de45d 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -97,13 +97,20 @@ use cmp::{PartialEq, Eq, Ord, PartialOrd, Equiv}; use cmp::Ordering; use cmp::Ordering::{Less, Equal, Greater}; -pub use intrinsics::copy_memory; +// FIXME #19649: intrinsic docs don't render, so these have no docs :( + +#[unstable] pub use intrinsics::copy_nonoverlapping_memory; + +#[unstable] +pub use intrinsics::copy_memory; + +#[experimental = "uncertain about naming and semantics"] pub use intrinsics::set_memory; -/// Create a null pointer. +/// Creates a null raw pointer. /// -/// # Example +/// # Examples /// /// ``` /// use std::ptr; /// @@ -115,9 +122,9 @@ pub use intrinsics::set_memory; #[unstable = "may need a different name after pending changes to pointer types"] pub fn null() -> *const T { 0 as *const T } -/// Create an unsafe mutable null pointer. +/// Creates a null mutable raw pointer. /// -/// # Example +/// # Examples /// /// ``` /// use std::ptr; /// @@ -129,7 +136,12 @@ pub fn null() -> *const T { 0 as *const T } #[unstable = "may need a different name after pending changes to pointer types"] pub fn null_mut() -> *mut T { 0 as *mut T } -/// Zeroes out `count * size_of::` bytes of memory at `dst` +/// Zeroes out `count * size_of::` bytes of memory at `dst`. `count` may be `0`. +/// +/// # Safety +/// +/// Beyond accepting a raw pointer, this is unsafe because it will not drop the contents of `dst`, +/// and may be used to create invalid instances of `T`. #[inline] #[experimental = "uncertain about naming and semantics"] #[allow(experimental)] @@ -137,8 +149,13 @@ pub unsafe fn zero_memory(dst: *mut T, count: uint) { set_memory(dst, 0, count); } -/// Swap the values at two mutable locations of the same type, without -/// deinitialising either. They may overlap. +/// Swaps the values at two mutable locations of the same type, without +/// deinitialising either. They may overlap, unlike `mem::swap` which is otherwise +/// equivalent. +/// +/// # Safety +/// +/// This is only unsafe because it accepts a raw pointer.
#[inline] #[unstable] pub unsafe fn swap(x: *mut T, y: *mut T) { @@ -156,8 +173,13 @@ pub unsafe fn swap(x: *mut T, y: *mut T) { mem::forget(tmp); } -/// Replace the value at a mutable location with a new one, returning the old -/// value, without deinitialising either. +/// Replaces the value at `dest` with `src`, returning the old +/// value, without dropping either. +/// +/// # Safety +/// +/// This is only unsafe because it accepts a raw pointer. +/// Otherwise, this operation is identical to `mem::replace`. #[inline] #[unstable] pub unsafe fn replace(dest: *mut T, mut src: T) -> T { @@ -165,7 +187,17 @@ pub unsafe fn replace(dest: *mut T, mut src: T) -> T { src } -/// Reads the value from `*src` and returns it. +/// Reads the value from `src` without dropping it. This leaves the +/// memory in `src` unchanged. +/// +/// # Safety +/// +/// Beyond accepting a raw pointer, this is unsafe because it semantically +/// moves the value out of `src` without preventing further usage of `src`. +/// If `T` is not `Copy`, then care must be taken to ensure that the value at +/// `src` is not used before the data is overwritten again (e.g. with `write`, +/// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use +/// because it will attempt to drop the value previously at `*src`. #[inline(always)] #[unstable] pub unsafe fn read(src: *const T) -> T { @@ -174,8 +206,11 @@ pub unsafe fn read(src: *const T) -> T { tmp } -/// Reads the value from `*src` and nulls it out. -/// This currently prevents destructors from executing. +/// Reads the value from `src` and nulls it out without dropping it. +/// +/// # Safety +/// +/// This is unsafe for the same reasons that `read` is unsafe. #[inline(always)] #[experimental] #[allow(experimental)] @@ -189,12 +224,17 @@ pub unsafe fn read_and_zero(dest: *mut T) -> T { tmp } -/// Unsafely overwrite a memory location with the given value without destroying +/// Overwrites a memory location with the given value without reading or dropping /// the old value. /// -/// This operation is unsafe because it does not destroy the previous value -/// contained at the location `dst`. This could leak allocations or resources, -/// so care must be taken to previously deallocate the value at `dst`. +/// # Safety +/// +/// Beyond accepting a raw pointer, this operation is unsafe because it does +/// not drop the contents of `dst`. This could leak allocations or resources, +/// so care must be taken not to overwrite an object that should be dropped. +/// +/// This is appropriate for initializing uninitialized memory, or overwriting memory +/// that has previously been `read` from. #[inline] #[unstable] pub unsafe fn write(dst: *mut T, src: T) { @@ -203,39 +243,47 @@ pub unsafe fn write(dst: *mut T, src: T) { /// Methods on raw pointers pub trait RawPtr { - /// Returns the null pointer. + /// Returns a null raw pointer. fn null() -> Self; - /// Returns true if the pointer is equal to the null pointer. + /// Returns true if the pointer is null. fn is_null(&self) -> bool; - /// Returns true if the pointer is not equal to the null pointer. + /// Returns true if the pointer is not null. fn is_not_null(&self) -> bool { !self.is_null() } - /// Returns the value of this pointer (ie, the address it points to) + /// Returns the address of the pointer. fn to_uint(&self) -> uint; /// Returns `None` if the pointer is null, or else returns a reference to the /// value wrapped in `Some`.
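A small usage sketch (not part of the patch) of the raw-pointer helpers documented above; `swap` and `replace` stay `unsafe`, so the calls sit in an `unsafe` block:

```rust
use std::ptr;

fn main() {
    let mut a = 1i32;
    let mut b = 2i32;
    unsafe {
        // Swap through raw pointers; unlike `mem::swap`, the two
        // locations would be allowed to overlap.
        ptr::swap(&mut a, &mut b);

        // Replace the value at `a`, getting the old one back without
        // dropping it.
        let old = ptr::replace(&mut a, 10);
        assert_eq!(old, 2);
    }
    assert_eq!((a, b), (10, 1));
}
```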
/// - /// # Safety Notes + /// # Safety /// /// While this method and its mutable counterpart are useful for null-safety, /// it is important to note that this is still an unsafe operation because /// the returned value could be pointing to invalid memory. unsafe fn as_ref<'a>(&self) -> Option<&'a T>; - /// Calculates the offset from a pointer. The offset *must* be in-bounds of - /// the object, or one-byte-past-the-end. `count` is in units of T; e.g. a + /// Calculates the offset from a pointer. `count` is in units of T; e.g. a /// `count` of 3 represents a pointer offset of `3 * sizeof::()` bytes. + /// + /// # Safety + /// + /// The offset must be in-bounds of the object, or one-byte-past-the-end. Otherwise + /// `offset` invokes Undefined Behaviour, regardless of whether the pointer is used. unsafe fn offset(self, count: int) -> Self; } /// Methods on mutable raw pointers pub trait RawMutPtr{ /// Returns `None` if the pointer is null, or else returns a mutable reference - /// to the value wrapped in `Some`. As with `as_ref`, this is unsafe because - /// it cannot verify the validity of the returned pointer. + /// to the value wrapped in `Some`. + /// + /// # Safety + /// + /// As with `as_ref`, this is unsafe because it cannot verify the validity + /// of the returned pointer. unsafe fn as_mut<'a>(&self) -> Option<&'a mut T>; } @@ -340,6 +388,7 @@ impl Equiv<*const T> for *mut T { } } +#[stable] impl Clone for *const T { #[inline] fn clone(&self) -> *const T { @@ -347,6 +396,7 @@ impl Clone for *const T { } } +#[stable] impl Clone for *mut T { #[inline] fn clone(&self) -> *mut T { @@ -451,4 +501,3 @@ impl PartialOrd for *mut T { #[inline] fn ge(&self, other: &*mut T) -> bool { *self >= *other } } - diff --git a/src/libcore/result.rs b/src/libcore/result.rs index 00a2a3d5854d8..8014b4dc89d70 100644 --- a/src/libcore/result.rs +++ b/src/libcore/result.rs @@ -230,15 +230,15 @@ #![stable] -use self::Result::*; +use self::Result::{Ok, Err}; -use std::fmt::Show; -use slice; -use slice::AsSlice; +use clone::Clone; +use fmt::Show; use iter::{Iterator, IteratorExt, DoubleEndedIterator, FromIterator, ExactSizeIterator}; -use option::Option; -use option::Option::{None, Some}; use ops::{FnMut, FnOnce}; +use option::Option::{mod, None, Some}; +use slice::AsSlice; +use slice; /// `Result` is a type that represents either success (`Ok`) or failure (`Err`). 
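A brief sketch of the `RawPtr` methods described above (`is_null`, `as_ref`, `offset`), written against the pre-1.0 API; keeping `offset` in bounds is the caller's responsibility:

```rust
use std::ptr;

fn main() {
    let xs = vec![10i32, 20, 30];
    let p: *const i32 = xs.as_ptr();
    unsafe {
        // `offset` must stay within `xs` (or one past its end).
        let second = p.offset(1);
        assert_eq!(second.as_ref(), Some(&20));
    }

    let empty: *const i32 = ptr::null();
    assert!(empty.is_null());
}
```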
/// @@ -248,9 +248,11 @@ use ops::{FnMut, FnOnce}; #[stable] pub enum Result { /// Contains the success value + #[stable] Ok(T), /// Contains the error value + #[stable] Err(E) } @@ -258,6 +260,7 @@ pub enum Result { // Type implementation ///////////////////////////////////////////////////////////////////////////// +#[stable] impl Result { ///////////////////////////////////////////////////////////////////////// // Querying the contained values @@ -300,7 +303,6 @@ impl Result { !self.is_ok() } - ///////////////////////////////////////////////////////////////////////// // Adapter for each variant ///////////////////////////////////////////////////////////////////////// @@ -369,7 +371,7 @@ impl Result { /// ``` #[inline] #[stable] - pub fn as_ref<'r>(&'r self) -> Result<&'r T, &'r E> { + pub fn as_ref(&self) -> Result<&T, &E> { match *self { Ok(ref x) => Ok(x), Err(ref x) => Err(x), @@ -395,8 +397,8 @@ impl Result { /// assert_eq!(x.unwrap_err(), 0); /// ``` #[inline] - #[unstable = "waiting for mut conventions"] - pub fn as_mut<'r>(&'r mut self) -> Result<&'r mut T, &'r mut E> { + #[stable] + pub fn as_mut(&mut self) -> Result<&mut T, &mut E> { match *self { Ok(ref mut x) => Ok(x), Err(ref mut x) => Err(x), @@ -420,7 +422,7 @@ impl Result { /// ``` #[inline] #[unstable = "waiting for mut conventions"] - pub fn as_mut_slice<'r>(&'r mut self) -> &'r mut [T] { + pub fn as_mut_slice(&mut self) -> &mut [T] { match *self { Ok(ref mut x) => slice::mut_ref_slice(x), Err(_) => { @@ -456,7 +458,7 @@ impl Result { /// let line: IoResult = buffer.read_line(); /// // Convert the string line to a number using `map` and `from_str` /// let val: IoResult = line.map(|line| { - /// from_str::(line.as_slice().trim_right()).unwrap_or(0) + /// line.as_slice().trim_right().parse::().unwrap_or(0) /// }); /// // Add the value if there were no errors, otherwise add 0 /// sum += val.ok().unwrap_or(0); @@ -465,11 +467,11 @@ impl Result { /// assert!(sum == 10); /// ``` #[inline] - #[unstable = "waiting for unboxed closures"] + #[stable] pub fn map U>(self, op: F) -> Result { match self { - Ok(t) => Ok(op(t)), - Err(e) => Err(e) + Ok(t) => Ok(op(t)), + Err(e) => Err(e) } } @@ -491,15 +493,14 @@ impl Result { /// assert_eq!(x.map_err(stringify), Err("error code: 13".to_string())); /// ``` #[inline] - #[unstable = "waiting for unboxed closures"] + #[stable] pub fn map_err F>(self, op: O) -> Result { match self { - Ok(t) => Ok(t), - Err(e) => Err(op(e)) + Ok(t) => Ok(t), + Err(e) => Err(op(e)) } } - ///////////////////////////////////////////////////////////////////////// // Iterator constructors ///////////////////////////////////////////////////////////////////////// @@ -516,9 +517,9 @@ impl Result { /// assert_eq!(x.iter().next(), None); /// ``` #[inline] - #[unstable = "waiting for iterator conventions"] - pub fn iter<'r>(&'r self) -> Item<&'r T> { - Item{opt: self.as_ref().ok()} + #[stable] + pub fn iter(&self) -> Iter { + Iter { inner: self.as_ref().ok() } } /// Returns a mutable iterator over the possibly contained value. @@ -537,9 +538,9 @@ impl Result { /// assert_eq!(x.iter_mut().next(), None); /// ``` #[inline] - #[unstable = "waiting for iterator conventions"] - pub fn iter_mut<'r>(&'r mut self) -> Item<&'r mut T> { - Item{opt: self.as_mut().ok()} + #[stable] + pub fn iter_mut(&mut self) -> IterMut { + IterMut { inner: self.as_mut().ok() } } /// Returns a consuming iterator over the possibly contained value. 
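A short usage sketch (not part of the patch) of the newly `#[stable]` combinators and of the `Iter` type now returned by `Result::iter`, assuming the pre-1.0 `uint` and `Show`-based `assert_eq!`:

```rust
fn main() {
    // `map` transforms the `Ok` value and leaves `Err` untouched.
    let msg: Result<&str, uint> = Ok("hello");
    assert_eq!(msg.map(|s| s.len()), Ok(5));

    // `iter` yields the `Ok` value at most once through the new `Iter` type.
    let x: Result<uint, &str> = Ok(5);
    assert_eq!(x.iter().next(), Some(&5));
    let e: Result<uint, &str> = Err("nope");
    assert_eq!(e.iter().next(), None);
}
```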
@@ -556,9 +557,9 @@ impl Result { /// assert_eq!(v, vec![]); /// ``` #[inline] - #[unstable = "waiting for iterator conventions"] - pub fn into_iter(self) -> Item { - Item{opt: self.ok()} + #[stable] + pub fn into_iter(self) -> IntoIter { + IntoIter { inner: self.ok() } } //////////////////////////////////////////////////////////////////////// @@ -611,7 +612,7 @@ impl Result { /// assert_eq!(Err(3).and_then(sq).and_then(sq), Err(3)); /// ``` #[inline] - #[unstable = "waiting for unboxed closures"] + #[stable] pub fn and_then Result>(self, op: F) -> Result { match self { Ok(t) => op(t), @@ -665,7 +666,7 @@ impl Result { /// assert_eq!(Err(3).or_else(err).or_else(err), Err(3)); /// ``` #[inline] - #[unstable = "waiting for unboxed closures"] + #[stable] pub fn or_else Result>(self, op: O) -> Result { match self { Ok(t) => Ok(t), @@ -687,7 +688,7 @@ impl Result { /// assert_eq!(x.unwrap_or(optb), optb); /// ``` #[inline] - #[unstable = "waiting for conventions"] + #[stable] pub fn unwrap_or(self, optb: T) -> T { match self { Ok(t) => t, @@ -707,7 +708,7 @@ impl Result { /// assert_eq!(Err("foo").unwrap_or_else(count), 3u); /// ``` #[inline] - #[unstable = "waiting for conventions"] + #[stable] pub fn unwrap_or_else T>(self, op: F) -> T { match self { Ok(t) => t, @@ -716,6 +717,7 @@ impl Result { } } +#[stable] impl Result { /// Unwraps a result, yielding the content of an `Ok`. /// @@ -736,7 +738,7 @@ impl Result { /// x.unwrap(); // panics with `emergency failure` /// ``` #[inline] - #[unstable = "waiting for conventions"] + #[stable] pub fn unwrap(self) -> T { match self { Ok(t) => t, @@ -746,6 +748,7 @@ impl Result { } } +#[stable] impl Result { /// Unwraps a result, yielding the content of an `Err`. /// @@ -766,7 +769,7 @@ impl Result { /// assert_eq!(x.unwrap_err(), "emergency failure"); /// ``` #[inline] - #[unstable = "waiting for conventions"] + #[stable] pub fn unwrap_err(self) -> E { match self { Ok(t) => @@ -797,42 +800,75 @@ impl AsSlice for Result { } ///////////////////////////////////////////////////////////////////////////// -// The Result Iterator +// The Result Iterators ///////////////////////////////////////////////////////////////////////////// -/// A `Result` iterator that yields either one or zero elements -/// -/// The `Item` iterator is returned by the `iter`, `iter_mut` and `into_iter` -/// methods on `Result`. -#[deriving(Clone)] -#[unstable = "waiting for iterator conventions"] -pub struct Item { - opt: Option -} +/// An iterator over a reference to the `Ok` variant of a `Result`. +#[stable] +pub struct Iter<'a, T: 'a> { inner: Option<&'a T> } -impl Iterator for Item { +impl<'a, T> Iterator<&'a T> for Iter<'a, T> { #[inline] - fn next(&mut self) -> Option { - self.opt.take() + fn next(&mut self) -> Option<&'a T> { self.inner.take() } + #[inline] + fn size_hint(&self) -> (uint, Option) { + let n = if self.inner.is_some() {1} else {0}; + (n, Some(n)) } +} + +impl<'a, T> DoubleEndedIterator<&'a T> for Iter<'a, T> { + #[inline] + fn next_back(&mut self) -> Option<&'a T> { self.inner.take() } +} + +impl<'a, T> ExactSizeIterator<&'a T> for Iter<'a, T> {} + +impl<'a, T> Clone for Iter<'a, T> { + fn clone(&self) -> Iter<'a, T> { Iter { inner: self.inner } } +} + +/// An iterator over a mutable reference to the `Ok` variant of a `Result`. 
+#[stable] +pub struct IterMut<'a, T: 'a> { inner: Option<&'a mut T> } +impl<'a, T> Iterator<&'a mut T> for IterMut<'a, T> { + #[inline] + fn next(&mut self) -> Option<&'a mut T> { self.inner.take() } #[inline] fn size_hint(&self) -> (uint, Option) { - match self.opt { - Some(_) => (1, Some(1)), - None => (0, Some(0)), - } + let n = if self.inner.is_some() {1} else {0}; + (n, Some(n)) } } -impl DoubleEndedIterator for Item { +impl<'a, T> DoubleEndedIterator<&'a mut T> for IterMut<'a, T> { #[inline] - fn next_back(&mut self) -> Option { - self.opt.take() + fn next_back(&mut self) -> Option<&'a mut T> { self.inner.take() } +} + +impl<'a, T> ExactSizeIterator<&'a mut T> for IterMut<'a, T> {} + +/// An iterator over the value in an `Ok` variant of a `Result`. +#[stable] +pub struct IntoIter { inner: Option } + +impl Iterator for IntoIter { + #[inline] + fn next(&mut self) -> Option { self.inner.take() } + #[inline] + fn size_hint(&self) -> (uint, Option) { + let n = if self.inner.is_some() {1} else {0}; + (n, Some(n)) } } -impl ExactSizeIterator for Item {} +impl DoubleEndedIterator for IntoIter { + #[inline] + fn next_back(&mut self) -> Option { self.inner.take() } +} + +impl ExactSizeIterator for IntoIter {} ///////////////////////////////////////////////////////////////////////////// // FromIterator diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index f5d117bca9fc8..26684864c4c49 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -67,7 +67,7 @@ pub trait SliceExt for Sized? { fn slice_from<'a>(&'a self, start: uint) -> &'a [T]; fn slice_to<'a>(&'a self, end: uint) -> &'a [T]; fn split_at<'a>(&'a self, mid: uint) -> (&'a [T], &'a [T]); - fn iter<'a>(&'a self) -> Items<'a, T>; + fn iter<'a>(&'a self) -> Iter<'a, T>; fn split<'a, P>(&'a self, pred: P) -> Splits<'a, T, P> where P: FnMut(&T) -> bool; fn splitn<'a, P>(&'a self, n: uint, pred: P) -> SplitsN> @@ -92,7 +92,7 @@ pub trait SliceExt for Sized? { fn slice_mut<'a>(&'a mut self, start: uint, end: uint) -> &'a mut [T]; fn slice_from_mut<'a>(&'a mut self, start: uint) -> &'a mut [T]; fn slice_to_mut<'a>(&'a mut self, end: uint) -> &'a mut [T]; - fn iter_mut<'a>(&'a mut self) -> MutItems<'a, T>; + fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T>; fn head_mut<'a>(&'a mut self) -> Option<&'a mut T>; fn tail_mut<'a>(&'a mut self) -> &'a mut [T]; fn init_mut<'a>(&'a mut self) -> &'a mut [T]; @@ -141,15 +141,15 @@ impl SliceExt for [T] { } #[inline] - fn iter<'a>(&'a self) -> Items<'a, T> { + fn iter<'a>(&'a self) -> Iter<'a, T> { unsafe { let p = self.as_ptr(); if mem::size_of::() == 0 { - Items{ptr: p, + Iter{ptr: p, end: (p as uint + self.len()) as *const T, marker: marker::ContravariantLifetime::<'a>} } else { - Items{ptr: p, + Iter{ptr: p, end: p.offset(self.len() as int), marker: marker::ContravariantLifetime::<'a>} } @@ -286,15 +286,15 @@ impl SliceExt for [T] { } #[inline] - fn iter_mut<'a>(&'a mut self) -> MutItems<'a, T> { + fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { unsafe { let p = self.as_mut_ptr(); if mem::size_of::() == 0 { - MutItems{ptr: p, + IterMut{ptr: p, end: (p as uint + self.len()) as *mut T, marker: marker::ContravariantLifetime::<'a>} } else { - MutItems{ptr: p, + IterMut{ptr: p, end: p.offset(self.len() as int), marker: marker::ContravariantLifetime::<'a>} } @@ -655,7 +655,7 @@ impl<'a, T> Default for &'a [T] { // Iterators // -// The shared definition of the `Item` and `MutItems` iterators +// The shared definition of the `Iter` and `IterMut` iterators macro_rules!
iterator { (struct $name:ident -> $ptr:ty, $elem:ty) => { #[experimental = "needs review"] @@ -738,14 +738,14 @@ macro_rules! make_slice { /// Immutable slice iterator #[experimental = "needs review"] -pub struct Items<'a, T: 'a> { +pub struct Iter<'a, T: 'a> { ptr: *const T, end: *const T, marker: marker::ContravariantLifetime<'a> } #[experimental] -impl<'a, T> ops::Slice for Items<'a, T> { +impl<'a, T> ops::Slice for Iter<'a, T> { fn as_slice_(&self) -> &[T] { self.as_slice() } @@ -763,7 +763,7 @@ impl<'a, T> ops::Slice for Items<'a, T> { } } -impl<'a, T> Items<'a, T> { +impl<'a, T> Iter<'a, T> { /// View the underlying data as a subslice of the original data. /// /// This has the same lifetime as the original slice, and so the @@ -774,20 +774,20 @@ impl<'a, T> Items<'a, T> { } } -impl<'a,T> Copy for Items<'a,T> {} +impl<'a,T> Copy for Iter<'a,T> {} -iterator!{struct Items -> *const T, &'a T} +iterator!{struct Iter -> *const T, &'a T} #[experimental = "needs review"] -impl<'a, T> ExactSizeIterator<&'a T> for Items<'a, T> {} +impl<'a, T> ExactSizeIterator<&'a T> for Iter<'a, T> {} -#[experimental = "needs review"] -impl<'a, T> Clone for Items<'a, T> { - fn clone(&self) -> Items<'a, T> { *self } +#[stable] +impl<'a, T> Clone for Iter<'a, T> { + fn clone(&self) -> Iter<'a, T> { *self } } #[experimental = "needs review"] -impl<'a, T> RandomAccessIterator<&'a T> for Items<'a, T> { +impl<'a, T> RandomAccessIterator<&'a T> for Iter<'a, T> { #[inline] fn indexable(&self) -> uint { let (exact, _) = self.size_hint(); @@ -813,14 +813,14 @@ impl<'a, T> RandomAccessIterator<&'a T> for Items<'a, T> { /// Mutable slice iterator. #[experimental = "needs review"] -pub struct MutItems<'a, T: 'a> { +pub struct IterMut<'a, T: 'a> { ptr: *mut T, end: *mut T, marker: marker::ContravariantLifetime<'a>, } #[experimental] -impl<'a, T> ops::Slice for MutItems<'a, T> { +impl<'a, T> ops::Slice for IterMut<'a, T> { fn as_slice_<'b>(&'b self) -> &'b [T] { make_slice!(T -> &'b [T]: self.ptr, self.end) } @@ -839,7 +839,7 @@ impl<'a, T> ops::Slice for MutItems<'a, T> { } #[experimental] -impl<'a, T> ops::SliceMut for MutItems<'a, T> { +impl<'a, T> ops::SliceMut for IterMut<'a, T> { fn as_mut_slice_<'b>(&'b mut self) -> &'b mut [T] { make_slice!(T -> &'b mut [T]: self.ptr, self.end) } @@ -857,7 +857,7 @@ impl<'a, T> ops::SliceMut for MutItems<'a, T> { } } -impl<'a, T> MutItems<'a, T> { +impl<'a, T> IterMut<'a, T> { /// View the underlying data as a subslice of the original data. /// /// To avoid creating `&mut` references that alias, this is forced @@ -870,10 +870,10 @@ impl<'a, T> MutItems<'a, T> { } } -iterator!{struct MutItems -> *mut T, &'a mut T} +iterator!{struct IterMut -> *mut T, &'a mut T} #[experimental = "needs review"] -impl<'a, T> ExactSizeIterator<&'a mut T> for MutItems<'a, T> {} +impl<'a, T> ExactSizeIterator<&'a mut T> for IterMut<'a, T> {} /// An abstraction over the splitting iterators, so that splitn, splitn_mut etc /// can be implemented once. @@ -893,6 +893,7 @@ pub struct Splits<'a, T:'a, P> where P: FnMut(&T) -> bool { } // FIXME(#19839) Remove in favor of `#[deriving(Clone)]` +#[stable] impl<'a, T, P> Clone for Splits<'a, T, P> where P: Clone + FnMut(&T) -> bool { fn clone(&self) -> Splits<'a, T, P> { Splits { @@ -1550,4 +1551,3 @@ impl_int_slice! { u16, i16 } impl_int_slice! { u32, i32 } impl_int_slice! { u64, i64 } impl_int_slice! 
{ uint, int } - diff --git a/src/libcore/str.rs b/src/libcore/str.rs index a89a7970ae9c4..204ffae6cbd54 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -16,31 +16,29 @@ #![doc(primitive = "str")] -pub use self::Utf16Item::*; -pub use self::Searcher::{Naive, TwoWay, TwoWayLong}; +use self::Searcher::{Naive, TwoWay, TwoWayLong}; -use char::Char; -use char; use clone::Clone; -use cmp::{Eq, mod}; +use cmp::{mod, Eq}; use default::Default; -use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator}; -use iter::{DoubleEndedIteratorExt, ExactSizeIterator}; use iter::range; +use iter::{DoubleEndedIteratorExt, ExactSizeIterator}; +use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator}; use kinds::Sized; use mem; use num::Int; -use option::Option; -use option::Option::{None, Some}; use ops::{Fn, FnMut}; +use option::Option::{mod, None, Some}; use ptr::RawPtr; use raw::{Repr, Slice}; +use result::Result::{mod, Ok, Err}; use slice::{mod, SliceExt}; use uint; /// A trait to abstract the idea of creating a new instance of a type from a /// string. -#[experimental = "might need to return Result"] +// FIXME(#17307): there should be an `E` associated type for a `Result` return +#[unstable = "will return a Result once associated types are working"] pub trait FromStr { /// Parses a string `s` to return an optional value of this type. If the /// string is ill-formatted, the None is returned. @@ -48,6 +46,7 @@ pub trait FromStr { } /// A utility function that just calls FromStr::from_str +#[deprecated = "call the .parse() method on the string instead"] pub fn from_str(s: &str) -> Option { FromStr::from_str(s) } @@ -60,9 +59,9 @@ impl FromStr for bool { /// # Examples /// /// ```rust - /// assert_eq!(from_str::("true"), Some(true)); - /// assert_eq!(from_str::("false"), Some(false)); - /// assert_eq!(from_str::("not even a boolean"), None); + /// assert_eq!("true".parse(), Some(true)); + /// assert_eq!("false".parse(), Some(false)); + /// assert_eq!("not even a boolean".parse::(), None); /// ``` #[inline] fn from_str(s: &str) -> Option { @@ -78,22 +77,39 @@ impl FromStr for bool { Section: Creating a string */ -/// Converts a slice of bytes to a string slice without performing any allocations. +/// Errors which can occur when attempting to interpret a byte slice as a `str`. +#[deriving(Copy, Eq, PartialEq, Clone)] +pub enum Utf8Error { + /// An invalid byte was detected at the byte offset given. + /// + /// The offset is guaranteed to be in bounds of the slice in question, and + /// the byte at the specified offset was the first invalid byte in the + /// sequence detected. + InvalidByte(uint), + + /// The byte slice was invalid because more bytes were needed but no more + /// bytes were available. + TooShort, +} + +/// Converts a slice of bytes to a string slice without performing any +/// allocations. /// /// Once the slice has been validated as utf-8, it is transmuted in-place and /// returned as a '&str' instead of a '&[u8]' /// -/// Returns None if the slice is not utf-8. -pub fn from_utf8<'a>(v: &'a [u8]) -> Option<&'a str> { - if is_utf8(v) { - Some(unsafe { from_utf8_unchecked(v) }) - } else { - None - } +/// # Failure +/// +/// Returns `Err` if the slice is not utf-8 with a description as to why the +/// provided slice is not utf-8. 
+pub fn from_utf8(v: &[u8]) -> Result<&str, Utf8Error> { + try!(run_utf8_validation_iterator(&mut v.iter())); + Ok(unsafe { from_utf8_unchecked(v) }) } /// Converts a slice of bytes to a string slice without checking /// that the string contains valid UTF-8. +#[stable] pub unsafe fn from_utf8_unchecked<'a>(v: &'a [u8]) -> &'a str { mem::transmute(v) } @@ -111,6 +127,7 @@ pub unsafe fn from_utf8_unchecked<'a>(v: &'a [u8]) -> &'a str { /// # Panics /// /// This function will panic if the string pointed to by `s` is not valid UTF-8. +#[unstable = "may change location based on the outcome of the c_str module"] pub unsafe fn from_c_str(s: *const i8) -> &'static str { let s = s as *const u8; let mut len = 0u; @@ -118,10 +135,11 @@ pub unsafe fn from_c_str(s: *const i8) -> &'static str { len += 1u; } let v: &'static [u8] = ::mem::transmute(Slice { data: s, len: len }); - from_utf8(v).expect("from_c_str passed invalid utf-8 data") + from_utf8(v).ok().expect("from_c_str passed invalid utf-8 data") } /// Something that can be used to compare against a character +#[unstable = "definition may change as pattern-related methods are stabilized"] pub trait CharEq { /// Determine if the splitter should split at the given character fn matches(&mut self, char) -> bool; @@ -167,7 +185,7 @@ Section: Iterators /// Created with the method `.chars()`. #[deriving(Clone, Copy)] pub struct Chars<'a> { - iter: slice::Items<'a, u8> + iter: slice::Iter<'a, u8> } // Return the initial codepoint accumulator for the first byte. @@ -273,12 +291,12 @@ impl<'a> DoubleEndedIterator for Chars<'a> { /// External iterator for a string's characters and their byte offsets. /// Use with the `std::iter` module. #[deriving(Clone)] -pub struct CharOffsets<'a> { +pub struct CharIndices<'a> { front_offset: uint, iter: Chars<'a>, } -impl<'a> Iterator<(uint, char)> for CharOffsets<'a> { +impl<'a> Iterator<(uint, char)> for CharIndices<'a> { #[inline] fn next(&mut self) -> Option<(uint, char)> { let (pre_len, _) = self.iter.iter.size_hint(); @@ -299,7 +317,7 @@ impl<'a> Iterator<(uint, char)> for CharOffsets<'a> { } } -impl<'a> DoubleEndedIterator<(uint, char)> for CharOffsets<'a> { +impl<'a> DoubleEndedIterator<(uint, char)> for CharIndices<'a> { #[inline] fn next_back(&mut self) -> Option<(uint, char)> { match self.iter.next_back() { @@ -315,13 +333,16 @@ impl<'a> DoubleEndedIterator<(uint, char)> for CharOffsets<'a> { /// External iterator for a string's bytes. /// Use with the `std::iter` module. -pub type Bytes<'a> = Map<&'a u8, u8, slice::Items<'a, u8>, BytesFn>; +#[stable] +#[deriving(Clone)] +pub struct Bytes<'a> { + inner: Map<&'a u8, u8, slice::Iter<'a, u8>, BytesFn>, +} /// A temporary new type wrapper that ensures that the `Bytes` iterator /// is cloneable. #[deriving(Copy)] -#[experimental = "iterator type instability"] -pub struct BytesFn(fn(&u8) -> u8); +struct BytesFn(fn(&u8) -> u8); impl<'a> Fn(&'a u8) -> u8 for BytesFn { extern "rust-call" fn call(&self, (ptr,): (&'a u8,)) -> u8 { @@ -355,8 +376,17 @@ pub struct CharSplitsN<'a, Sep> { invert: bool, } +/// An iterator over the lines of a string, separated by `\n`. +#[stable] +pub struct Lines<'a> { + inner: CharSplits<'a, char>, +} + /// An iterator over the lines of a string, separated by either `\n` or (`\r\n`). 
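A sketch of the new `Result`-returning `from_utf8` (not part of the patch), assuming `std::str` re-exports the items changed here and that the reported index is the start of the first invalid sequence:

```rust
use std::str;
use std::str::Utf8Error;

fn main() {
    // Valid UTF-8 now comes back as `Ok(&str)`...
    assert_eq!(str::from_utf8(b"hello"), Ok("hello"));

    // ...and invalid input reports why validation failed.
    let bad = vec![b'h', 0xffu8, b'i'];
    match str::from_utf8(bad.as_slice()) {
        Err(Utf8Error::InvalidByte(idx)) => assert_eq!(idx, 1),
        other => panic!("unexpected result: {}", other),
    }
}
```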
-pub type AnyLines<'a> = Map<&'a str, &'a str, CharSplits<'a, char>, fn(&str) -> &str>; +#[stable] +pub struct LinesAny<'a> { + inner: Map<&'a str, &'a str, Lines<'a>, fn(&str) -> &str>, +} impl<'a, Sep> CharSplits<'a, Sep> { #[inline] @@ -799,63 +829,6 @@ impl<'a> Iterator<&'a str> for StrSplits<'a> { } } -/// External iterator for a string's UTF16 codeunits. -/// Use with the `std::iter` module. -#[deriving(Clone)] -pub struct Utf16CodeUnits<'a> { - encoder: Utf16Encoder> -} - -impl<'a> Iterator for Utf16CodeUnits<'a> { - #[inline] - fn next(&mut self) -> Option { self.encoder.next() } - - #[inline] - fn size_hint(&self) -> (uint, Option) { self.encoder.size_hint() } -} - - -/// Iterator adaptor for encoding `char`s to UTF-16. -#[deriving(Clone)] -pub struct Utf16Encoder { - chars: I, - extra: u16 -} - -impl Utf16Encoder { - /// Create an UTF-16 encoder from any `char` iterator. - pub fn new(chars: I) -> Utf16Encoder where I: Iterator { - Utf16Encoder { chars: chars, extra: 0 } - } -} - -impl Iterator for Utf16Encoder where I: Iterator { - #[inline] - fn next(&mut self) -> Option { - if self.extra != 0 { - let tmp = self.extra; - self.extra = 0; - return Some(tmp); - } - - let mut buf = [0u16, ..2]; - self.chars.next().map(|ch| { - let n = ch.encode_utf16(buf[mut]).unwrap_or(0); - if n == 2 { self.extra = buf[1]; } - buf[0] - }) - } - - #[inline] - fn size_hint(&self) -> (uint, Option) { - let (low, high) = self.chars.size_hint(); - // every char gets either one u16 or two u16, - // so this iterator is between 1 or 2 times as - // long as the underlying iterator. - (low, high.and_then(|n| n.checked_mul(2))) - } -} - /* Section: Comparing strings */ @@ -880,7 +853,7 @@ fn eq_slice_(a: &str, b: &str) -> bool { /// to compare &[u8] byte slices that are not necessarily valid UTF-8. #[lang="str_eq"] #[inline] -pub fn eq_slice(a: &str, b: &str) -> bool { +fn eq_slice(a: &str, b: &str) -> bool { eq_slice_(a, b) } @@ -893,32 +866,37 @@ Section: Misc /// `iter` reset such that it is pointing at the first byte in the /// invalid sequence. #[inline(always)] -fn run_utf8_validation_iterator(iter: &mut slice::Items) -> bool { +fn run_utf8_validation_iterator(iter: &mut slice::Iter) + -> Result<(), Utf8Error> { + let whole = iter.as_slice(); loop { // save the current thing we're pointing at. let old = *iter; // restore the iterator we had at the start of this codepoint. - macro_rules! err ( () => { {*iter = old; return false} }); + macro_rules! err (() => { { + *iter = old; + return Err(Utf8Error::InvalidByte(whole.len() - iter.as_slice().len())) + } }); macro_rules! next ( () => { - match iter.next() { - Some(a) => *a, - // we needed data, but there was none: error! - None => err!() - } - }); + match iter.next() { + Some(a) => *a, + // we needed data, but there was none: error! + None => return Err(Utf8Error::TooShort), + } + }); let first = match iter.next() { Some(&b) => b, // we're at the end of the iterator and a codepoint // boundary at the same time, so this string is valid. - None => return true + None => return Ok(()) }; // ASCII characters are always valid, so only large // bytes need more examination. if first >= 128 { - let w = utf8_char_width(first); + let w = UTF8_CHAR_WIDTH[first as uint] as uint; let second = next!(); // 2-byte encoding is for codepoints \u{0080} to \u{07ff} // first C2 80 last DF BF @@ -964,145 +942,13 @@ fn run_utf8_validation_iterator(iter: &mut slice::Items) -> bool { } /// Determines if a vector of bytes contains valid UTF-8. 
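Editor's note: the new `Bytes`, `Lines`, and `LinesAny` types above all follow the same recipe: wrap the concrete adaptor (`Map`, `CharSplits`) in an opaque struct so it no longer appears in public signatures, then forward `next`/`size_hint`/`next_back` to the inner iterator (those forwarding impls appear near the end of this file's diff, together with the fn-item-to-fn-pointer coercion in `lines_any`). A minimal, hypothetical sketch of that recipe; `Doubled` is not part of the patch.

```rust
use std::iter::Map;
use std::slice;

/// Hypothetical wrapper, invented for illustration: hides the `Map` adaptor
/// behind a newtype, as `Bytes` and `LinesAny` do above.
pub struct Doubled<'a> {
    inner: Map<&'a u32, u32, slice::Iter<'a, u32>, fn(&u32) -> u32>,
}

pub fn doubled<'a>(xs: &'a [u32]) -> Doubled<'a> {
    fn double(x: &u32) -> u32 { *x * 2 }
    let f: fn(&u32) -> u32 = double; // coerce the fn item to a fn pointer
    Doubled { inner: xs.iter().map(f) }
}

impl<'a> Iterator<u32> for Doubled<'a> {
    #[inline]
    fn next(&mut self) -> Option<u32> { self.inner.next() }
    #[inline]
    fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
}

fn main() {
    let xs = [1u32, 2, 3];
    let v: Vec<u32> = doubled(&xs).collect();
    assert_eq!(v, vec![2, 4, 6]);
}
```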
+#[deprecated = "call from_utf8 instead"] pub fn is_utf8(v: &[u8]) -> bool { - run_utf8_validation_iterator(&mut v.iter()) -} - -/// Determines if a vector of `u16` contains valid UTF-16 -pub fn is_utf16(v: &[u16]) -> bool { - let mut it = v.iter(); - macro_rules! next ( ($ret:expr) => { - match it.next() { Some(u) => *u, None => return $ret } - } - ); - loop { - let u = next!(true); - - match char::from_u32(u as u32) { - Some(_) => {} - None => { - let u2 = next!(false); - if u < 0xD7FF || u > 0xDBFF || - u2 < 0xDC00 || u2 > 0xDFFF { return false; } - } - } - } -} - -/// An iterator that decodes UTF-16 encoded codepoints from a vector -/// of `u16`s. -#[deriving(Clone)] -pub struct Utf16Items<'a> { - iter: slice::Items<'a, u16> -} -/// The possibilities for values decoded from a `u16` stream. -#[deriving(Copy, PartialEq, Eq, Clone, Show)] -pub enum Utf16Item { - /// A valid codepoint. - ScalarValue(char), - /// An invalid surrogate without its pair. - LoneSurrogate(u16) -} - -impl Utf16Item { - /// Convert `self` to a `char`, taking `LoneSurrogate`s to the - /// replacement character (U+FFFD). - #[inline] - pub fn to_char_lossy(&self) -> char { - match *self { - ScalarValue(c) => c, - LoneSurrogate(_) => '\u{FFFD}' - } - } + run_utf8_validation_iterator(&mut v.iter()).is_ok() } -impl<'a> Iterator for Utf16Items<'a> { - fn next(&mut self) -> Option { - let u = match self.iter.next() { - Some(u) => *u, - None => return None - }; - - if u < 0xD800 || 0xDFFF < u { - // not a surrogate - Some(ScalarValue(unsafe {mem::transmute(u as u32)})) - } else if u >= 0xDC00 { - // a trailing surrogate - Some(LoneSurrogate(u)) - } else { - // preserve state for rewinding. - let old = self.iter; - - let u2 = match self.iter.next() { - Some(u2) => *u2, - // eof - None => return Some(LoneSurrogate(u)) - }; - if u2 < 0xDC00 || u2 > 0xDFFF { - // not a trailing surrogate so we're not a valid - // surrogate pair, so rewind to redecode u2 next time. - self.iter = old; - return Some(LoneSurrogate(u)) - } - - // all ok, so lets decode it. - let c = ((u - 0xD800) as u32 << 10 | (u2 - 0xDC00) as u32) + 0x1_0000; - Some(ScalarValue(unsafe {mem::transmute(c)})) - } - } - - #[inline] - fn size_hint(&self) -> (uint, Option) { - let (low, high) = self.iter.size_hint(); - // we could be entirely valid surrogates (2 elements per - // char), or entirely non-surrogates (1 element per char) - (low / 2, high) - } -} - -/// Create an iterator over the UTF-16 encoded codepoints in `v`, -/// returning invalid surrogates as `LoneSurrogate`s. -/// -/// # Example -/// -/// ```rust -/// use std::str; -/// use std::str::{ScalarValue, LoneSurrogate}; -/// -/// // 𝄞music -/// let v = [0xD834, 0xDD1E, 0x006d, 0x0075, -/// 0x0073, 0xDD1E, 0x0069, 0x0063, -/// 0xD834]; -/// -/// assert_eq!(str::utf16_items(&v).collect::>(), -/// vec![ScalarValue('𝄞'), -/// ScalarValue('m'), ScalarValue('u'), ScalarValue('s'), -/// LoneSurrogate(0xDD1E), -/// ScalarValue('i'), ScalarValue('c'), -/// LoneSurrogate(0xD834)]); -/// ``` -pub fn utf16_items<'a>(v: &'a [u16]) -> Utf16Items<'a> { - Utf16Items { iter : v.iter() } -} - -/// Return a slice of `v` ending at (and not including) the first NUL -/// (0). 
-/// -/// # Example -/// -/// ```rust -/// use std::str; -/// -/// // "abcd" -/// let mut v = ['a' as u16, 'b' as u16, 'c' as u16, 'd' as u16]; -/// // no NULs so no change -/// assert_eq!(str::truncate_utf16_at_nul(&v), v.as_slice()); -/// -/// // "ab\0d" -/// v[2] = 0; -/// let b: &[_] = &['a' as u16, 'b' as u16]; -/// assert_eq!(str::truncate_utf16_at_nul(&v), b); -/// ``` +/// Deprecated function +#[deprecated = "this function will be removed"] pub fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] { match v.iter().position(|c| *c == 0) { // don't include the 0 @@ -1133,6 +979,7 @@ static UTF8_CHAR_WIDTH: [u8, ..256] = [ /// Given a first byte, determine how many bytes are in this UTF-8 character #[inline] +#[deprecated = "this function has moved to libunicode"] pub fn utf8_char_width(b: u8) -> uint { return UTF8_CHAR_WIDTH[b as uint] as uint; } @@ -1141,6 +988,7 @@ pub fn utf8_char_width(b: u8) -> uint { /// the next `char` in a string. This can be used as a data structure /// for iterating over the UTF-8 bytes of a string. #[deriving(Copy)] +#[unstable = "naming is uncertain with container conventions"] pub struct CharRange { /// Current `char` pub ch: char, @@ -1159,7 +1007,7 @@ pub mod raw { use ptr::RawPtr; use raw::Slice; use slice::SliceExt; - use str::{is_utf8, StrPrelude}; + use str::StrExt; /// Converts a slice of bytes to a string slice without checking /// that the string contains valid UTF-8. @@ -1181,8 +1029,7 @@ pub mod raw { curr = s.offset(len as int); } let v = Slice { data: s, len: len }; - assert!(is_utf8(::mem::transmute(v))); - ::mem::transmute(v) + super::from_utf8(::mem::transmute(v)).unwrap() } /// Takes a bytewise (not UTF-8) slice from a string. @@ -1225,7 +1072,7 @@ pub mod traits { use option::Option; use option::Option::Some; use ops; - use str::{Str, StrPrelude, eq_slice}; + use str::{Str, StrExt, eq_slice}; impl Ord for str { #[inline] @@ -1291,707 +1138,70 @@ pub mod traits { } /// Any string that can be represented as a slice +#[unstable = "Instead of taking this bound generically, this trait will be \ + replaced with one of slicing syntax, deref coercions, or \ + a more generic conversion trait"] pub trait Str for Sized? { /// Work with `self` as a slice. fn as_slice<'a>(&'a self) -> &'a str; } +#[allow(deprecated)] impl Str for str { #[inline] fn as_slice<'a>(&'a self) -> &'a str { self } } +#[allow(deprecated)] impl<'a, Sized? S> Str for &'a S where S: Str { #[inline] fn as_slice(&self) -> &str { Str::as_slice(*self) } } /// Methods for string slices -pub trait StrPrelude for Sized? { - /// Returns true if one string contains another - /// - /// # Arguments - /// - /// - needle - The string to look for - /// - /// # Example - /// - /// ```rust - /// assert!("bananas".contains("nana")); - /// ``` - fn contains(&self, needle: &str) -> bool; +#[allow(missing_docs)] +pub trait StrExt for Sized? { + // NB there are no docs here are they're all located on the StrExt trait in + // libcollections, not here. - /// Returns true if a string contains a char. - /// - /// # Arguments - /// - /// - needle - The char to look for - /// - /// # Example - /// - /// ```rust - /// assert!("hello".contains_char('e')); - /// ``` + fn contains(&self, needle: &str) -> bool; fn contains_char(&self, needle: char) -> bool; - - /// An iterator over the characters of `self`. Note, this iterates - /// over Unicode code-points, not Unicode graphemes. 
- /// - /// # Example - /// - /// ```rust - /// let v: Vec = "abc åäö".chars().collect(); - /// assert_eq!(v, vec!['a', 'b', 'c', ' ', 'å', 'ä', 'ö']); - /// ``` fn chars<'a>(&'a self) -> Chars<'a>; - - /// An iterator over the bytes of `self` - /// - /// # Example - /// - /// ```rust - /// let v: Vec = "bors".bytes().collect(); - /// assert_eq!(v, b"bors".to_vec()); - /// ``` fn bytes<'a>(&'a self) -> Bytes<'a>; - - /// An iterator over the characters of `self` and their byte offsets. - fn char_indices<'a>(&'a self) -> CharOffsets<'a>; - - /// An iterator over substrings of `self`, separated by characters - /// matched by `sep`. - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// let v: Vec<&str> = "Mary had a little lamb".split(' ').collect(); - /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]); - /// - /// let v: Vec<&str> = "abc1def2ghi".split(|&: c: char| c.is_numeric()).collect(); - /// assert_eq!(v, vec!["abc", "def", "ghi"]); - /// - /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').collect(); - /// assert_eq!(v, vec!["lion", "", "tiger", "leopard"]); - /// - /// let v: Vec<&str> = "".split('X').collect(); - /// assert_eq!(v, vec![""]); - /// # } - /// ``` + fn char_indices<'a>(&'a self) -> CharIndices<'a>; fn split<'a, Sep: CharEq>(&'a self, sep: Sep) -> CharSplits<'a, Sep>; - - /// An iterator over substrings of `self`, separated by characters - /// matched by `sep`, restricted to splitting at most `count` - /// times. - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// let v: Vec<&str> = "Mary had a little lambda".splitn(2, ' ').collect(); - /// assert_eq!(v, vec!["Mary", "had", "a little lambda"]); - /// - /// let v: Vec<&str> = "abc1def2ghi".splitn(1, |&: c: char| c.is_numeric()).collect(); - /// assert_eq!(v, vec!["abc", "def2ghi"]); - /// - /// let v: Vec<&str> = "lionXXtigerXleopard".splitn(2, 'X').collect(); - /// assert_eq!(v, vec!["lion", "", "tigerXleopard"]); - /// - /// let v: Vec<&str> = "abcXdef".splitn(0, 'X').collect(); - /// assert_eq!(v, vec!["abcXdef"]); - /// - /// let v: Vec<&str> = "".splitn(1, 'X').collect(); - /// assert_eq!(v, vec![""]); - /// # } - /// ``` fn splitn<'a, Sep: CharEq>(&'a self, count: uint, sep: Sep) -> CharSplitsN<'a, Sep>; - - /// An iterator over substrings of `self`, separated by characters - /// matched by `sep`. - /// - /// Equivalent to `split`, except that the trailing substring - /// is skipped if empty (terminator semantics). 
- /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// let v: Vec<&str> = "A.B.".split_terminator('.').collect(); - /// assert_eq!(v, vec!["A", "B"]); - /// - /// let v: Vec<&str> = "A..B..".split_terminator('.').collect(); - /// assert_eq!(v, vec!["A", "", "B", ""]); - /// - /// let v: Vec<&str> = "Mary had a little lamb".split(' ').rev().collect(); - /// assert_eq!(v, vec!["lamb", "little", "a", "had", "Mary"]); - /// - /// let v: Vec<&str> = "abc1def2ghi".split(|&: c: char| c.is_numeric()).rev().collect(); - /// assert_eq!(v, vec!["ghi", "def", "abc"]); - /// - /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').rev().collect(); - /// assert_eq!(v, vec!["leopard", "tiger", "", "lion"]); - /// # } - /// ``` fn split_terminator<'a, Sep: CharEq>(&'a self, sep: Sep) -> CharSplits<'a, Sep>; - - /// An iterator over substrings of `self`, separated by characters - /// matched by `sep`, starting from the end of the string. - /// Restricted to splitting at most `count` times. - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// let v: Vec<&str> = "Mary had a little lamb".rsplitn(2, ' ').collect(); - /// assert_eq!(v, vec!["lamb", "little", "Mary had a"]); - /// - /// let v: Vec<&str> = "abc1def2ghi".rsplitn(1, |&: c: char| c.is_numeric()).collect(); - /// assert_eq!(v, vec!["ghi", "abc1def"]); - /// - /// let v: Vec<&str> = "lionXXtigerXleopard".rsplitn(2, 'X').collect(); - /// assert_eq!(v, vec!["leopard", "tiger", "lionX"]); - /// # } - /// ``` fn rsplitn<'a, Sep: CharEq>(&'a self, count: uint, sep: Sep) -> CharSplitsN<'a, Sep>; - - /// An iterator over the start and end indices of the disjoint - /// matches of `sep` within `self`. - /// - /// That is, each returned value `(start, end)` satisfies - /// `self.slice(start, end) == sep`. For matches of `sep` within - /// `self` that overlap, only the indices corresponding to the - /// first match are returned. - /// - /// # Example - /// - /// ```rust - /// let v: Vec<(uint, uint)> = "abcXXXabcYYYabc".match_indices("abc").collect(); - /// assert_eq!(v, vec![(0,3), (6,9), (12,15)]); - /// - /// let v: Vec<(uint, uint)> = "1abcabc2".match_indices("abc").collect(); - /// assert_eq!(v, vec![(1,4), (4,7)]); - /// - /// let v: Vec<(uint, uint)> = "ababa".match_indices("aba").collect(); - /// assert_eq!(v, vec![(0, 3)]); // only the first `aba` - /// ``` fn match_indices<'a>(&'a self, sep: &'a str) -> MatchIndices<'a>; - - /// An iterator over the substrings of `self` separated by `sep`. - /// - /// # Example - /// - /// ```rust - /// let v: Vec<&str> = "abcXXXabcYYYabc".split_str("abc").collect(); - /// assert_eq!(v, vec!["", "XXX", "YYY", ""]); - /// - /// let v: Vec<&str> = "1abcabc2".split_str("abc").collect(); - /// assert_eq!(v, vec!["1", "", "2"]); - /// ``` fn split_str<'a>(&'a self, &'a str) -> StrSplits<'a>; - - /// An iterator over the lines of a string (subsequences separated - /// by `\n`). This does not include the empty string after a - /// trailing `\n`. - /// - /// # Example - /// - /// ```rust - /// let four_lines = "foo\nbar\n\nbaz\n"; - /// let v: Vec<&str> = four_lines.lines().collect(); - /// assert_eq!(v, vec!["foo", "bar", "", "baz"]); - /// ``` - fn lines<'a>(&'a self) -> CharSplits<'a, char>; - - /// An iterator over the lines of a string, separated by either - /// `\n` or `\r\n`. As with `.lines()`, this does not include an - /// empty trailing line. 
- /// - /// # Example - /// - /// ```rust - /// let four_lines = "foo\r\nbar\n\r\nbaz\n"; - /// let v: Vec<&str> = four_lines.lines_any().collect(); - /// assert_eq!(v, vec!["foo", "bar", "", "baz"]); - /// ``` - fn lines_any<'a>(&'a self) -> AnyLines<'a>; - - /// Returns the number of Unicode code points (`char`) that a - /// string holds. - /// - /// This does not perform any normalization, and is `O(n)`, since - /// UTF-8 is a variable width encoding of code points. - /// - /// *Warning*: The number of code points in a string does not directly - /// correspond to the number of visible characters or width of the - /// visible text due to composing characters, and double- and - /// zero-width ones. - /// - /// See also `.len()` for the byte length. - /// - /// # Example - /// - /// ```rust - /// // composed forms of `ö` and `é` - /// let c = "Löwe 老虎 Léopard"; // German, Simplified Chinese, French - /// // decomposed forms of `ö` and `é` - /// let d = "Lo\u{0308}we 老虎 Le\u{0301}opard"; - /// - /// assert_eq!(c.char_len(), 15); - /// assert_eq!(d.char_len(), 17); - /// - /// assert_eq!(c.len(), 21); - /// assert_eq!(d.len(), 23); - /// - /// // the two strings *look* the same - /// println!("{}", c); - /// println!("{}", d); - /// ``` + fn lines<'a>(&'a self) -> Lines<'a>; + fn lines_any<'a>(&'a self) -> LinesAny<'a>; fn char_len(&self) -> uint; - - /// Returns a slice of the given string from the byte range - /// [`begin`..`end`). - /// - /// This operation is `O(1)`. - /// - /// Panics when `begin` and `end` do not point to valid characters - /// or point beyond the last character of the string. - /// - /// See also `slice_to` and `slice_from` for slicing prefixes and - /// suffixes of strings, and `slice_chars` for slicing based on - /// code point counts. - /// - /// # Example - /// - /// ```rust - /// let s = "Löwe 老虎 Léopard"; - /// assert_eq!(s.slice(0, 1), "L"); - /// - /// assert_eq!(s.slice(1, 9), "öwe 老"); - /// - /// // these will panic: - /// // byte 2 lies within `ö`: - /// // s.slice(2, 3); - /// - /// // byte 8 lies within `老` - /// // s.slice(1, 8); - /// - /// // byte 100 is outside the string - /// // s.slice(3, 100); - /// ``` fn slice<'a>(&'a self, begin: uint, end: uint) -> &'a str; - - /// Returns a slice of the string from `begin` to its end. - /// - /// Equivalent to `self.slice(begin, self.len())`. - /// - /// Panics when `begin` does not point to a valid character, or is - /// out of bounds. - /// - /// See also `slice`, `slice_to` and `slice_chars`. fn slice_from<'a>(&'a self, begin: uint) -> &'a str; - - /// Returns a slice of the string from the beginning to byte - /// `end`. - /// - /// Equivalent to `self.slice(0, end)`. - /// - /// Panics when `end` does not point to a valid character, or is - /// out of bounds. - /// - /// See also `slice`, `slice_from` and `slice_chars`. fn slice_to<'a>(&'a self, end: uint) -> &'a str; - - /// Returns a slice of the string from the character range - /// [`begin`..`end`). - /// - /// That is, start at the `begin`-th code point of the string and - /// continue to the `end`-th code point. This does not detect or - /// handle edge cases such as leaving a combining character as the - /// first code point of the string. - /// - /// Due to the design of UTF-8, this operation is `O(end)`. - /// See `slice`, `slice_to` and `slice_from` for `O(1)` - /// variants that use byte indices rather than code point - /// indices. 
- /// - /// Panics if `begin` > `end` or the either `begin` or `end` are - /// beyond the last character of the string. - /// - /// # Example - /// - /// ```rust - /// let s = "Löwe 老虎 Léopard"; - /// assert_eq!(s.slice_chars(0, 4), "Löwe"); - /// assert_eq!(s.slice_chars(5, 7), "老虎"); - /// ``` fn slice_chars<'a>(&'a self, begin: uint, end: uint) -> &'a str; - - /// Takes a bytewise (not UTF-8) slice from a string. - /// - /// Returns the substring from [`begin`..`end`). - /// - /// Caller must check both UTF-8 character boundaries and the boundaries of - /// the entire slice as well. unsafe fn slice_unchecked<'a>(&'a self, begin: uint, end: uint) -> &'a str; - - /// Returns true if `needle` is a prefix of the string. - /// - /// # Example - /// - /// ```rust - /// assert!("banana".starts_with("ba")); - /// ``` fn starts_with(&self, needle: &str) -> bool; - - /// Returns true if `needle` is a suffix of the string. - /// - /// # Example - /// - /// ```rust - /// assert!("banana".ends_with("nana")); - /// ``` fn ends_with(&self, needle: &str) -> bool; - - /// Returns a string with characters that match `to_trim` removed from the left and the right. - /// - /// # Arguments - /// - /// * to_trim - a character matcher - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// assert_eq!("11foo1bar11".trim_chars('1'), "foo1bar"); - /// let x: &[_] = &['1', '2']; - /// assert_eq!("12foo1bar12".trim_chars(x), "foo1bar"); - /// assert_eq!("123foo1bar123".trim_chars(|&: c: char| c.is_numeric()), "foo1bar"); - /// # } - /// ``` fn trim_chars<'a, C: CharEq>(&'a self, to_trim: C) -> &'a str; - - /// Returns a string with leading `chars_to_trim` removed. - /// - /// # Arguments - /// - /// * to_trim - a character matcher - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// assert_eq!("11foo1bar11".trim_left_chars('1'), "foo1bar11"); - /// let x: &[_] = &['1', '2']; - /// assert_eq!("12foo1bar12".trim_left_chars(x), "foo1bar12"); - /// assert_eq!("123foo1bar123".trim_left_chars(|&: c: char| c.is_numeric()), "foo1bar123"); - /// # } - /// ``` fn trim_left_chars<'a, C: CharEq>(&'a self, to_trim: C) -> &'a str; - - /// Returns a string with trailing `chars_to_trim` removed. - /// - /// # Arguments - /// - /// * to_trim - a character matcher - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// assert_eq!("11foo1bar11".trim_right_chars('1'), "11foo1bar"); - /// let x: &[_] = &['1', '2']; - /// assert_eq!("12foo1bar12".trim_right_chars(x), "12foo1bar"); - /// assert_eq!("123foo1bar123".trim_right_chars(|&: c: char| c.is_numeric()), "123foo1bar"); - /// # } - /// ``` fn trim_right_chars<'a, C: CharEq>(&'a self, to_trim: C) -> &'a str; - - /// Check that `index`-th byte lies at the start and/or end of a - /// UTF-8 code point sequence. - /// - /// The start and end of the string (when `index == self.len()`) - /// are considered to be boundaries. - /// - /// Panics if `index` is greater than `self.len()`. 
- /// - /// # Example - /// - /// ```rust - /// let s = "Löwe 老虎 Léopard"; - /// assert!(s.is_char_boundary(0)); - /// // start of `老` - /// assert!(s.is_char_boundary(6)); - /// assert!(s.is_char_boundary(s.len())); - /// - /// // second byte of `ö` - /// assert!(!s.is_char_boundary(2)); - /// - /// // third byte of `老` - /// assert!(!s.is_char_boundary(8)); - /// ``` fn is_char_boundary(&self, index: uint) -> bool; - - /// Pluck a character out of a string and return the index of the next - /// character. - /// - /// This function can be used to iterate over the Unicode characters of a - /// string. - /// - /// # Example - /// - /// This example manually iterates through the characters of a - /// string; this should normally be done by `.chars()` or - /// `.char_indices`. - /// - /// ```rust - /// use std::str::CharRange; - /// - /// let s = "中华Việt Nam"; - /// let mut i = 0u; - /// while i < s.len() { - /// let CharRange {ch, next} = s.char_range_at(i); - /// println!("{}: {}", i, ch); - /// i = next; - /// } - /// ``` - /// - /// This outputs: - /// - /// ```text - /// 0: 中 - /// 3: 华 - /// 6: V - /// 7: i - /// 8: ệ - /// 11: t - /// 12: - /// 13: N - /// 14: a - /// 15: m - /// ``` - /// - /// # Arguments - /// - /// * s - The string - /// * i - The byte offset of the char to extract - /// - /// # Return value - /// - /// A record {ch: char, next: uint} containing the char value and the byte - /// index of the next Unicode character. - /// - /// # Panics - /// - /// If `i` is greater than or equal to the length of the string. - /// If `i` is not the index of the beginning of a valid UTF-8 character. fn char_range_at(&self, start: uint) -> CharRange; - - /// Given a byte position and a str, return the previous char and its position. - /// - /// This function can be used to iterate over a Unicode string in reverse. - /// - /// Returns 0 for next index if called on start index 0. - /// - /// # Panics - /// - /// If `i` is greater than the length of the string. - /// If `i` is not an index following a valid UTF-8 character. fn char_range_at_reverse(&self, start: uint) -> CharRange; - - /// Plucks the character starting at the `i`th byte of a string. - /// - /// # Example - /// - /// ```rust - /// let s = "abπc"; - /// assert_eq!(s.char_at(1), 'b'); - /// assert_eq!(s.char_at(2), 'π'); - /// assert_eq!(s.char_at(4), 'c'); - /// ``` - /// - /// # Panics - /// - /// If `i` is greater than or equal to the length of the string. - /// If `i` is not the index of the beginning of a valid UTF-8 character. fn char_at(&self, i: uint) -> char; - - /// Plucks the character ending at the `i`th byte of a string. - /// - /// # Panics - /// - /// If `i` is greater than the length of the string. - /// If `i` is not an index following a valid UTF-8 character. fn char_at_reverse(&self, i: uint) -> char; - - /// Work with the byte buffer of a string as a byte slice. - /// - /// # Example - /// - /// ```rust - /// assert_eq!("bors".as_bytes(), b"bors"); - /// ``` fn as_bytes<'a>(&'a self) -> &'a [u8]; - - /// Returns the byte index of the first character of `self` that - /// matches `search`. 
- /// - /// # Return value - /// - /// `Some` containing the byte index of the last matching character - /// or `None` if there is no match - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// let s = "Löwe 老虎 Léopard"; - /// - /// assert_eq!(s.find('L'), Some(0)); - /// assert_eq!(s.find('é'), Some(14)); - /// - /// // the first space - /// assert_eq!(s.find(|&: c: char| c.is_whitespace()), Some(5)); - /// - /// // neither are found - /// let x: &[_] = &['1', '2']; - /// assert_eq!(s.find(x), None); - /// # } - /// ``` fn find(&self, search: C) -> Option; - - /// Returns the byte index of the last character of `self` that - /// matches `search`. - /// - /// # Return value - /// - /// `Some` containing the byte index of the last matching character - /// or `None` if there is no match. - /// - /// # Example - /// - /// ```rust - /// # #![feature(unboxed_closures)] - /// - /// # fn main() { - /// let s = "Löwe 老虎 Léopard"; - /// - /// assert_eq!(s.rfind('L'), Some(13)); - /// assert_eq!(s.rfind('é'), Some(14)); - /// - /// // the second space - /// assert_eq!(s.rfind(|&: c: char| c.is_whitespace()), Some(12)); - /// - /// // searches for an occurrence of either `1` or `2`, but neither are found - /// let x: &[_] = &['1', '2']; - /// assert_eq!(s.rfind(x), None); - /// # } - /// ``` fn rfind(&self, search: C) -> Option; - - /// Returns the byte index of the first matching substring - /// - /// # Arguments - /// - /// * `needle` - The string to search for - /// - /// # Return value - /// - /// `Some` containing the byte index of the first matching substring - /// or `None` if there is no match. - /// - /// # Example - /// - /// ```rust - /// let s = "Löwe 老虎 Léopard"; - /// - /// assert_eq!(s.find_str("老虎 L"), Some(6)); - /// assert_eq!(s.find_str("muffin man"), None); - /// ``` fn find_str(&self, &str) -> Option; - - /// Retrieves the first character from a string slice and returns - /// it. This does not allocate a new string; instead, it returns a - /// slice that point one character beyond the character that was - /// shifted. If the string does not contain any characters, - /// None is returned instead. - /// - /// # Example - /// - /// ```rust - /// let s = "Löwe 老虎 Léopard"; - /// let (c, s1) = s.slice_shift_char().unwrap(); - /// assert_eq!(c, 'L'); - /// assert_eq!(s1, "öwe 老虎 Léopard"); - /// - /// let (c, s2) = s1.slice_shift_char().unwrap(); - /// assert_eq!(c, 'ö'); - /// assert_eq!(s2, "we 老虎 Léopard"); - /// ``` fn slice_shift_char<'a>(&'a self) -> Option<(char, &'a str)>; - - /// Returns the byte offset of an inner slice relative to an enclosing outer slice. - /// - /// Panics if `inner` is not a direct slice contained within self. - /// - /// # Example - /// - /// ```rust - /// let string = "a\nb\nc"; - /// let lines: Vec<&str> = string.lines().collect(); - /// - /// assert!(string.subslice_offset(lines[0]) == 0); // &"a" - /// assert!(string.subslice_offset(lines[1]) == 2); // &"b" - /// assert!(string.subslice_offset(lines[2]) == 4); // &"c" - /// ``` fn subslice_offset(&self, inner: &str) -> uint; - - /// Return an unsafe pointer to the strings buffer. - /// - /// The caller must ensure that the string outlives this pointer, - /// and that it is not reallocated (e.g. by pushing to the - /// string). fn as_ptr(&self) -> *const u8; - - /// Return an iterator of `u16` over the string encoded as UTF-16. 
- fn utf16_units<'a>(&'a self) -> Utf16CodeUnits<'a>; - - /// Return the number of bytes in this string - /// - /// # Example - /// - /// ``` - /// assert_eq!("foo".len(), 3); - /// assert_eq!("ƒoo".len(), 4); - /// ``` - #[experimental = "not triaged yet"] fn len(&self) -> uint; - - /// Returns true if this slice contains no bytes - /// - /// # Example - /// - /// ``` - /// assert!("".is_empty()); - /// ``` - #[inline] - #[experimental = "not triaged yet"] - fn is_empty(&self) -> bool { self.len() == 0 } + fn is_empty(&self) -> bool; } #[inline(never)] @@ -2001,7 +1211,7 @@ fn slice_error_fail(s: &str, begin: uint, end: uint) -> ! { begin, end, s); } -impl StrPrelude for str { +impl StrExt for str { #[inline] fn contains(&self, needle: &str) -> bool { self.find_str(needle).is_some() @@ -2021,12 +1231,12 @@ impl StrPrelude for str { fn bytes(&self) -> Bytes { fn deref(&x: &u8) -> u8 { x } - self.as_bytes().iter().map(BytesFn(deref)) + Bytes { inner: self.as_bytes().iter().map(BytesFn(deref)) } } #[inline] - fn char_indices(&self) -> CharOffsets { - CharOffsets{front_offset: 0, iter: self.chars()} + fn char_indices(&self) -> CharIndices { + CharIndices { front_offset: 0, iter: self.chars() } } #[inline] @@ -2089,18 +1299,19 @@ impl StrPrelude for str { } #[inline] - fn lines(&self) -> CharSplits { - self.split_terminator('\n') + fn lines(&self) -> Lines { + Lines { inner: self.split_terminator('\n') } } - fn lines_any(&self) -> AnyLines { + fn lines_any(&self) -> LinesAny { fn f(line: &str) -> &str { let l = line.len(); if l > 0 && line.as_bytes()[l - 1] == b'\r' { line.slice(0, l - 1) } else { line } } - self.lines().map(f) + let f: fn(&str) -> &str = f; // coerce to fn pointer + LinesAny { inner: self.lines().map(f) } } #[inline] @@ -2353,12 +1564,10 @@ impl StrPrelude for str { } #[inline] - fn utf16_units(&self) -> Utf16CodeUnits { - Utf16CodeUnits { encoder: Utf16Encoder::new(self.chars()) } - } + fn len(&self) -> uint { self.repr().len } #[inline] - fn len(&self) -> uint { self.repr().len } + fn is_empty(&self) -> bool { self.len() == 0 } } #[stable] @@ -2367,3 +1576,34 @@ impl<'a> Default for &'a str { fn default() -> &'a str { "" } } +impl<'a> Iterator<&'a str> for Lines<'a> { + #[inline] + fn next(&mut self) -> Option<&'a str> { self.inner.next() } + #[inline] + fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } +} +impl<'a> DoubleEndedIterator<&'a str> for Lines<'a> { + #[inline] + fn next_back(&mut self) -> Option<&'a str> { self.inner.next_back() } +} +impl<'a> Iterator<&'a str> for LinesAny<'a> { + #[inline] + fn next(&mut self) -> Option<&'a str> { self.inner.next() } + #[inline] + fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } +} +impl<'a> DoubleEndedIterator<&'a str> for LinesAny<'a> { + #[inline] + fn next_back(&mut self) -> Option<&'a str> { self.inner.next_back() } +} +impl<'a> Iterator for Bytes<'a> { + #[inline] + fn next(&mut self) -> Option { self.inner.next() } + #[inline] + fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } +} +impl<'a> DoubleEndedIterator for Bytes<'a> { + #[inline] + fn next_back(&mut self) -> Option { self.inner.next_back() } +} +impl<'a> ExactSizeIterator for Bytes<'a> {} diff --git a/src/libcore/tuple/mod.rs b/src/libcore/tuple.rs similarity index 99% rename from src/libcore/tuple/mod.rs rename to src/libcore/tuple.rs index 5ea84f7db9179..a92914c99e35b 100644 --- a/src/libcore/tuple/mod.rs +++ b/src/libcore/tuple.rs @@ -62,15 +62,14 @@ //! assert_eq!(d, (0u32, 0.0f32)); //! 
``` -#![doc(primitive = "tuple")] #![stable] #[unstable = "this is just a documentation module and should not be part \ of the public api"] -pub use unit; use clone::Clone; use cmp::*; +use cmp::Ordering::*; use default::Default; use option::Option; use option::Option::Some; @@ -126,7 +125,7 @@ macro_rules! tuple_impls { )+ } - #[unstable = "waiting for Clone to stabilize"] + #[stable] impl<$($T:Clone),+> Clone for ($($T,)+) { fn clone(&self) -> ($($T,)+) { ($(e!(self.$idx.clone()),)+) @@ -328,4 +327,3 @@ tuple_impls! { (val11, ref11, mut11, 11) -> L } } - diff --git a/src/libcoretest/lib.rs b/src/libcoretest/lib.rs index 05d862d7bc7f4..44029ebb7fa0f 100644 --- a/src/libcoretest/lib.rs +++ b/src/libcoretest/lib.rs @@ -13,6 +13,7 @@ extern crate core; extern crate test; extern crate libc; +extern crate unicode; mod any; mod atomic; diff --git a/src/libcoretest/str.rs b/src/libcoretest/str.rs index 763fcccdbfdc8..63d6e14a4a6b8 100644 --- a/src/libcoretest/str.rs +++ b/src/libcoretest/str.rs @@ -117,7 +117,7 @@ fn test_rev_split_char_iterator_no_trailing() { #[test] fn test_utf16_code_units() { - use core::str::Utf16Encoder; + use unicode::str::Utf16Encoder; assert_eq!(Utf16Encoder::new(vec!['é', '\U0001F4A9'].into_iter()).collect::>(), vec![0xE9, 0xD83D, 0xDCA9]) } diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 3099bf559e4bf..c284fb7c9e338 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -23,7 +23,8 @@ html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] -#![feature(macro_rules, globs, import_shadowing)] +#![feature(macro_rules, globs, slicing_syntax)] + pub use self::Piece::*; pub use self::Position::*; pub use self::Alignment::*; @@ -136,7 +137,7 @@ pub enum Count<'a> { /// necessary there's probably lots of room for improvement performance-wise. pub struct Parser<'a> { input: &'a str, - cur: str::CharOffsets<'a>, + cur: str::CharIndices<'a>, /// Error messages accumulated during parsing pub errors: Vec, } @@ -208,13 +209,11 @@ impl<'a> Parser<'a> { self.cur.next(); } Some((_, other)) => { - self.err(format!("expected `{}`, found `{}`", - c, - other).as_slice()); + self.err(format!("expected `{}`, found `{}`", c, other)[]); } None => { self.err(format!("expected `{}` but string was terminated", - c).as_slice()); + c)[]); } } } @@ -237,12 +236,12 @@ impl<'a> Parser<'a> { // we may not consume the character, so clone the iterator match self.cur.clone().next() { Some((pos, '}')) | Some((pos, '{')) => { - return self.input.slice(start, pos); + return self.input[start..pos]; } Some(..) => { self.cur.next(); } None => { self.cur.next(); - return self.input.slice(start, self.input.len()); + return self.input[start..self.input.len()]; } } } @@ -282,7 +281,7 @@ impl<'a> Parser<'a> { flags: 0, precision: CountImplied, width: CountImplied, - ty: self.input.slice(0, 0), + ty: self.input[0..0], }; if !self.consume(':') { return spec } @@ -391,7 +390,7 @@ impl<'a> Parser<'a> { self.cur.next(); pos } - Some(..) | None => { return self.input.slice(0, 0); } + Some(..) | None => { return self.input[0..0]; } }; let mut end; loop { @@ -403,7 +402,7 @@ impl<'a> Parser<'a> { None => { end = self.input.len(); break } } } - self.input.slice(start, end) + self.input[start..end] } /// Optionally parses an integer at the current position. 
This doesn't deal diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index b45d0c9b01ecd..0426f26937621 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -85,8 +85,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] -#![feature(globs, phase)] -#![feature(import_shadowing)] +#![feature(globs, phase, slicing_syntax)] #![feature(unboxed_closures)] #![deny(missing_docs)] @@ -102,9 +101,8 @@ use self::Whitespace::*; use self::LengthLimit::*; use std::fmt; -use std::result::Result::{Err, Ok}; +use std::iter::repeat; use std::result; -use std::string::String; /// Name of an option. Either a string or a single char. #[deriving(Clone, PartialEq, Eq)] @@ -283,7 +281,7 @@ impl OptGroup { impl Matches { fn opt_vals(&self, nm: &str) -> Vec { - match find_opt(self.opts.as_slice(), Name::from_str(nm)) { + match find_opt(self.opts[], Name::from_str(nm)) { Some(id) => self.vals[id].clone(), None => panic!("No option '{}' defined", nm) } @@ -311,8 +309,7 @@ impl Matches { /// Returns true if any of several options were matched. pub fn opts_present(&self, names: &[String]) -> bool { for nm in names.iter() { - match find_opt(self.opts.as_slice(), - Name::from_str(nm.as_slice())) { + match find_opt(self.opts.as_slice(), Name::from_str(nm[])) { Some(id) if !self.vals[id].is_empty() => return true, _ => (), }; @@ -323,7 +320,7 @@ impl Matches { /// Returns the string argument supplied to one of several matching options or `None`. pub fn opts_str(&self, names: &[String]) -> Option { for nm in names.iter() { - match self.opt_val(nm.as_slice()) { + match self.opt_val(nm[]) { Some(Val(ref s)) => return Some(s.clone()), _ => () } @@ -588,7 +585,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { while i < l { let cur = args[i].clone(); let curlen = cur.len(); - if !is_arg(cur.as_slice()) { + if !is_arg(cur[]) { free.push(cur); } else if cur == "--" { let mut j = i + 1; @@ -598,7 +595,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { let mut names; let mut i_arg = None; if cur.as_bytes()[1] == b'-' { - let tail = cur.slice(2, curlen); + let tail = cur[2..curlen]; let tail_eq: Vec<&str> = tail.split('=').collect(); if tail_eq.len() <= 1 { names = vec!(Long(tail.to_string())); @@ -634,7 +631,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { }; if arg_follows && range.next < curlen { - i_arg = Some(cur.slice(range.next, curlen).to_string()); + i_arg = Some(cur[range.next..curlen].to_string()); break; } @@ -661,7 +658,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { .push(Val((i_arg.clone()) .unwrap())); } else if name_pos < names.len() || i + 1 == l || - is_arg(args[i + 1].as_slice()) { + is_arg(args[i + 1][]) { vals[optid].push(Given); } else { i += 1; @@ -703,7 +700,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { /// Derive a usage message from a set of long options. 
pub fn usage(brief: &str, opts: &[OptGroup]) -> String { - let desc_sep = format!("\n{}", " ".repeat(24)); + let desc_sep = format!("\n{}", repeat(" ").take(24).collect::()); let rows = opts.iter().map(|optref| { let OptGroup{short_name, @@ -713,14 +710,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { hasarg, ..} = (*optref).clone(); - let mut row = " ".repeat(4); + let mut row = repeat(" ").take(4).collect::(); // short option match short_name.len() { 0 => {} 1 => { row.push('-'); - row.push_str(short_name.as_slice()); + row.push_str(short_name[]); row.push(' '); } _ => panic!("the short name should only be 1 ascii char long"), @@ -731,7 +728,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} _ => { row.push_str("--"); - row.push_str(long_name.as_slice()); + row.push_str(long_name[]); row.push(' '); } } @@ -739,23 +736,23 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // arg match hasarg { No => {} - Yes => row.push_str(hint.as_slice()), + Yes => row.push_str(hint[]), Maybe => { row.push('['); - row.push_str(hint.as_slice()); + row.push_str(hint[]); row.push(']'); } } // FIXME: #5516 should be graphemes not codepoints // here we just need to indent the start of the description - let rowlen = row.char_len(); + let rowlen = row.chars().count(); if rowlen < 24 { for _ in range(0, 24 - rowlen) { row.push(' '); } } else { - row.push_str(desc_sep.as_slice()) + row.push_str(desc_sep[]); } // Normalize desc to contain words separated by one space character @@ -767,16 +764,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // FIXME: #5516 should be graphemes not codepoints let mut desc_rows = Vec::new(); - each_split_within(desc_normalized_whitespace.as_slice(), - 54, - |substr| { + each_split_within(desc_normalized_whitespace[], 54, |substr| { desc_rows.push(substr.to_string()); true }); // FIXME: #5516 should be graphemes not codepoints // wrapped description - row.push_str(desc_rows.connect(desc_sep.as_slice()).as_slice()); + row.push_str(desc_rows.connect(desc_sep[])[]); row }); @@ -795,10 +790,10 @@ fn format_option(opt: &OptGroup) -> String { // Use short_name is possible, but fallback to long_name. 
if opt.short_name.len() > 0 { line.push('-'); - line.push_str(opt.short_name.as_slice()); + line.push_str(opt.short_name[]); } else { line.push_str("--"); - line.push_str(opt.long_name.as_slice()); + line.push_str(opt.long_name[]); } if opt.hasarg != No { @@ -806,7 +801,7 @@ fn format_option(opt: &OptGroup) -> String { if opt.hasarg == Maybe { line.push('['); } - line.push_str(opt.hint.as_slice()); + line.push_str(opt.hint[]); if opt.hasarg == Maybe { line.push(']'); } @@ -828,8 +823,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String { line.push_str(opts.iter() .map(format_option) .collect::>() - .connect(" ") - .as_slice()); + .connect(" ")[]); line } @@ -892,9 +886,9 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where (B, Cr, UnderLim) => { B } (B, Cr, OverLim) if (i - last_start + 1) > lim => panic!("word starting with {} longer than limit!", - ss.slice(last_start, i + 1)), + ss[last_start..i + 1]), (B, Cr, OverLim) => { - *cont = it(ss.slice(slice_start, last_end)); + *cont = it(ss[slice_start..last_end]); slice_start = last_start; B } @@ -904,7 +898,7 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where } (B, Ws, OverLim) => { last_end = i; - *cont = it(ss.slice(slice_start, last_end)); + *cont = it(ss[slice_start..last_end]); A } @@ -913,14 +907,14 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where B } (C, Cr, OverLim) => { - *cont = it(ss.slice(slice_start, last_end)); + *cont = it(ss[slice_start..last_end]); slice_start = i; last_start = i; last_end = i; B } (C, Ws, OverLim) => { - *cont = it(ss.slice(slice_start, last_end)); + *cont = it(ss[slice_start..last_end]); A } (C, Ws, UnderLim) => { diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 34e19aa4a03e7..ce3df1090bd58 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -448,8 +448,8 @@ impl<'a> LabelText<'a> { /// Renders text as string suitable for a label in a .dot file. pub fn escape(&self) -> String { match self { - &LabelStr(ref s) => (&**s).escape_default(), - &EscStr(ref s) => LabelText::escape_str(s.as_slice()), + &LabelStr(ref s) => s.escape_default(), + &EscStr(ref s) => LabelText::escape_str(s[]), } } @@ -475,10 +475,10 @@ impl<'a> LabelText<'a> { /// Puts `suffix` on a line below this label, with a blank line separator. 
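Editor's note: two smaller migrations in the getopts hunks above are easy to miss: `" ".repeat(n)` becomes `iter::repeat(" ").take(n).collect::<String>()`, and the codepoint count moves from `row.char_len()` to `row.chars().count()`. A quick sketch, not taken from the patch:

```rust
use std::iter::repeat;

fn main() {
    // before: let pad = " ".repeat(4);
    let pad: String = repeat(" ").take(4).collect();
    assert_eq!(pad, "    ");

    // before: row.char_len()
    let row = "Löwe";
    assert_eq!(row.chars().count(), 4); // code points
    assert_eq!(row.len(), 5);           // byte length, unchanged
}
```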
pub fn suffix_line(self, suffix: LabelText) -> LabelText<'static> { - let mut prefix = self.pre_escaped_content().into_string(); + let mut prefix = self.pre_escaped_content().into_owned(); let suffix = suffix.pre_escaped_content(); prefix.push_str(r"\n\n"); - prefix.push_str(suffix.as_slice()); + prefix.push_str(suffix[]); EscStr(prefix.into_cow()) } } @@ -671,7 +671,7 @@ mod tests { impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph { fn graph_id(&'a self) -> Id<'a> { - Id::new(self.name.as_slice()).unwrap() + Id::new(self.name[]).unwrap() } fn node_id(&'a self, n: &Node) -> Id<'a> { id_name(n) @@ -735,7 +735,7 @@ mod tests { fn test_input(g: LabelledGraph) -> IoResult { let mut writer = Vec::new(); render(&g, &mut writer).unwrap(); - (&mut writer.as_slice()).read_to_string() + (&mut writer[]).read_to_string() } // All of the tests use raw-strings as the format for the expected outputs, @@ -847,7 +847,7 @@ r#"digraph hasse_diagram { edge(1, 3, ";"), edge(2, 3, ";" ))); render(&g, &mut writer).unwrap(); - let r = (&mut writer.as_slice()).read_to_string(); + let r = (&mut writer[]).read_to_string(); assert_eq!(r.unwrap(), r#"digraph syntax_tree { diff --git a/src/libgraphviz/maybe_owned_vec.rs b/src/libgraphviz/maybe_owned_vec.rs index be8761043c0c7..88483b6c93543 100644 --- a/src/libgraphviz/maybe_owned_vec.rs +++ b/src/libgraphviz/maybe_owned_vec.rs @@ -63,7 +63,7 @@ impl<'a,T> IntoMaybeOwnedVector<'a,T> for &'a [T] { } impl<'a,T> MaybeOwnedVector<'a,T> { - pub fn iter(&'a self) -> slice::Items<'a,T> { + pub fn iter(&'a self) -> slice::Iter<'a,T> { match self { &Growable(ref v) => v.as_slice().iter(), &Borrowed(ref v) => v.iter(), diff --git a/src/liblog/directive.rs b/src/liblog/directive.rs index d1db0ec89a16b..2b25a64affee3 100644 --- a/src/liblog/directive.rs +++ b/src/liblog/directive.rs @@ -23,7 +23,7 @@ pub static LOG_LEVEL_NAMES: [&'static str, ..4] = ["ERROR", "WARN", "INFO", /// Parse an individual log level that is either a number or a symbolic log level fn parse_log_level(level: &str) -> Option { - from_str::(level).or_else(|| { + level.parse::().or_else(|| { let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level)); pos.map(|p| p as u32 + 1) }).map(|p| cmp::min(p, ::MAX_LOG_LEVEL)) diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs index 2bf9af9027182..97301628a4512 100644 --- a/src/liblog/lib.rs +++ b/src/liblog/lib.rs @@ -164,7 +164,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] -#![feature(macro_rules, unboxed_closures)] +#![feature(macro_rules, unboxed_closures, slicing_syntax)] #![deny(missing_docs)] extern crate regex; @@ -280,7 +280,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: &fmt::Arguments) { // Test the literal string from args against the current filter, if there // is one. match unsafe { FILTER.as_ref() } { - Some(filter) if !filter.is_match(args.to_string().as_slice()) => return, + Some(filter) if !filter.is_match(args.to_string()[]) => return, _ => {} } @@ -370,12 +370,12 @@ pub fn mod_enabled(level: u32, module: &str) -> bool { fn enabled(level: u32, module: &str, - iter: slice::Items) + iter: slice::Iter) -> bool { // Search for the longest match, the vector is assumed to be pre-sorted. for directive in iter.rev() { match directive.name { - Some(ref name) if !module.starts_with(name.as_slice()) => {}, + Some(ref name) if !module.starts_with(name[]) => {}, Some(..) 
| None => { return level <= directive.level } @@ -390,7 +390,7 @@ fn enabled(level: u32, /// `Once` primitive (and this function is called from that primitive). fn init() { let (mut directives, filter) = match os::getenv("RUST_LOG") { - Some(spec) => directive::parse_logging_spec(spec.as_slice()), + Some(spec) => directive::parse_logging_spec(spec[]), None => (Vec::new(), None), }; diff --git a/src/libregex/parse.rs b/src/libregex/parse.rs index 78558a322665d..0cd8df73c37ce 100644 --- a/src/libregex/parse.rs +++ b/src/libregex/parse.rs @@ -286,7 +286,7 @@ impl<'a> Parser<'a> { true => Ok(()), false => { self.err(format!("Expected {} but got EOF.", - expected).as_slice()) + expected)[]) } } } @@ -295,10 +295,10 @@ impl<'a> Parser<'a> { match self.next_char() { true if self.cur() == expected => Ok(()), true => self.err(format!("Expected '{}' but got '{}'.", - expected, self.cur()).as_slice()), + expected, self.cur())[]), false => { self.err(format!("Expected '{}' but got EOF.", - expected).as_slice()) + expected)[]) } } } @@ -443,14 +443,14 @@ impl<'a> Parser<'a> { Literal(c3, _) => c2 = c3, // allow literal escapes below ast => return self.err(format!("Expected a literal, but got {}.", - ast).as_slice()), + ast)[]), } } if c2 < c { return self.err(format!("Invalid character class \ range '{}-{}'", c, - c2).as_slice()) + c2)[]) } ranges.push((c, self.cur())) } else { @@ -488,7 +488,7 @@ impl<'a> Parser<'a> { FLAG_EMPTY }; let name = self.slice(name_start, closer - 1); - match find_class(ASCII_CLASSES, name.as_slice()) { + match find_class(ASCII_CLASSES, name[]) { None => None, Some(ranges) => { self.chari = closer; @@ -513,7 +513,7 @@ impl<'a> Parser<'a> { return self.err(format!("No closing brace for counted \ repetition starting at position \ {}.", - start).as_slice()) + start)[]) } }; self.chari = closer; @@ -524,7 +524,7 @@ impl<'a> Parser<'a> { // Parse the min and max values from the regex. 
let (mut min, mut max): (uint, Option); if !inner.contains(",") { - min = try!(self.parse_uint(inner.as_slice())); + min = try!(self.parse_uint(inner[])); max = Some(min); } else { let pieces: Vec<&str> = inner.splitn(1, ',').collect(); @@ -546,19 +546,19 @@ impl<'a> Parser<'a> { if min > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - min, MAX_REPEAT).as_slice()); + min, MAX_REPEAT)[]); } if max.is_some() { let m = max.unwrap(); if m > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - m, MAX_REPEAT).as_slice()); + m, MAX_REPEAT)[]); } if m < min { return self.err(format!( "Max repetitions ({}) cannot be smaller than min \ - repetitions ({}).", m, min).as_slice()); + repetitions ({}).", m, min)[]); } } @@ -622,8 +622,7 @@ impl<'a> Parser<'a> { Ok(AstClass(ranges, flags)) } _ => { - self.err(format!("Invalid escape sequence '\\\\{}'", - c).as_slice()) + self.err(format!("Invalid escape sequence '\\\\{}'", c)[]) } } } @@ -643,7 +642,7 @@ impl<'a> Parser<'a> { Some(i) => i, None => return self.err(format!( "Missing '}}' for unclosed '{{' at position {}", - self.chari).as_slice()), + self.chari)[]), }; if closer - self.chari + 1 == 0 { return self.err("No Unicode class name found.") @@ -657,10 +656,10 @@ impl<'a> Parser<'a> { name = self.slice(self.chari + 1, self.chari + 2); self.chari += 1; } - match find_class(UNICODE_CLASSES, name.as_slice()) { + match find_class(UNICODE_CLASSES, name[]) { None => { return self.err(format!("Could not find Unicode class '{}'", - name).as_slice()) + name)[]) } Some(ranges) => { Ok(AstClass(ranges, negated | (self.flags & FLAG_NOCASE))) @@ -683,11 +682,11 @@ impl<'a> Parser<'a> { } } let s = self.slice(start, end); - match num::from_str_radix::(s.as_slice(), 8) { + match num::from_str_radix::(s[], 8) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { self.err(format!("Could not parse '{}' as octal number.", - s).as_slice()) + s)[]) } } } @@ -705,12 +704,12 @@ impl<'a> Parser<'a> { None => { return self.err(format!("Missing '}}' for unclosed \ '{{' at position {}", - start).as_slice()) + start)[]) } Some(i) => i, }; self.chari = closer; - self.parse_hex_digits(self.slice(start, closer).as_slice()) + self.parse_hex_digits(self.slice(start, closer)[]) } // Parses a two-digit hex number. 
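Editor's note: away from libcore, the dominant mechanical change in this patch is the move from `.as_slice()` and `.slice(a, b)` to the `slicing_syntax` forms `expr[]` and `expr[a..b]` (each affected crate also gains `#![feature(slicing_syntax)]`). A small sketch of the correspondence, not taken from the patch:

```rust
#![feature(slicing_syntax)]

fn main() {
    let owned: String = format!("Expected `{}` but got EOF.", ':');

    // before: owned.as_slice()      after: owned[]
    let s: &str = owned[];

    // before: s.slice(9, 12)        after: s[9..12]
    assert_eq!(s[9..12], "`:`");

    // A String can be sliced directly too, as getopts does with cur[2..curlen].
    assert_eq!(owned[0..8], "Expected");
}
```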
@@ -730,8 +729,7 @@ impl<'a> Parser<'a> { match num::from_str_radix::(s, 16) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { - self.err(format!("Could not parse '{}' as hex number.", - s).as_slice()) + self.err(format!("Could not parse '{}' as hex number.", s)[]) } } } @@ -757,7 +755,7 @@ impl<'a> Parser<'a> { } if self.names.contains(&name) { return self.err(format!("Duplicate capture group name '{}'.", - name).as_slice()) + name)[]) } self.names.push(name.clone()); self.chari = closer; @@ -791,7 +789,7 @@ impl<'a> Parser<'a> { if sign < 0 { return self.err(format!( "Cannot negate flags twice in '{}'.", - self.slice(start, self.chari + 1)).as_slice()) + self.slice(start, self.chari + 1))[]) } sign = -1; saw_flag = false; @@ -802,7 +800,7 @@ impl<'a> Parser<'a> { if !saw_flag { return self.err(format!( "A valid flag does not follow negation in '{}'", - self.slice(start, self.chari + 1)).as_slice()) + self.slice(start, self.chari + 1))[]) } flags = flags ^ flags; } @@ -814,7 +812,7 @@ impl<'a> Parser<'a> { return Ok(()) } _ => return self.err(format!( - "Unrecognized flag '{}'.", self.cur()).as_slice()), + "Unrecognized flag '{}'.", self.cur())[]), } } } @@ -908,11 +906,11 @@ impl<'a> Parser<'a> { } fn parse_uint(&self, s: &str) -> Result { - match from_str::(s) { + match s.parse::() { Some(i) => Ok(i), None => { self.err(format!("Expected an unsigned integer but got '{}'.", - s).as_slice()) + s)[]) } } } @@ -922,8 +920,7 @@ impl<'a> Parser<'a> { Some(c) => Ok(c), None => { self.err(format!("Could not decode '{}' to unicode \ - character.", - n).as_slice()) + character.", n)[]) } } } diff --git a/src/libregex/re.rs b/src/libregex/re.rs index 151587e423abb..4383192edafb0 100644 --- a/src/libregex/re.rs +++ b/src/libregex/re.rs @@ -417,7 +417,7 @@ impl Regex { /// # extern crate regex; #[phase(plugin)] extern crate regex_macros; /// # fn main() { /// let re = regex!("[^01]+"); - /// assert_eq!(re.replace("1078910", "").as_slice(), "1010"); + /// assert_eq!(re.replace("1078910", ""), "1010"); /// # } /// ``` /// @@ -435,7 +435,7 @@ impl Regex { /// let result = re.replace("Springsteen, Bruce", |&: caps: &Captures| { /// format!("{} {}", caps.at(2).unwrap_or(""), caps.at(1).unwrap_or("")) /// }); - /// assert_eq!(result.as_slice(), "Bruce Springsteen"); + /// assert_eq!(result, "Bruce Springsteen"); /// # } /// ``` /// @@ -450,7 +450,7 @@ impl Regex { /// # fn main() { /// let re = regex!(r"(?P[^,\s]+),\s+(?P\S+)"); /// let result = re.replace("Springsteen, Bruce", "$first $last"); - /// assert_eq!(result.as_slice(), "Bruce Springsteen"); + /// assert_eq!(result, "Bruce Springsteen"); /// # } /// ``` /// @@ -469,7 +469,7 @@ impl Regex { /// /// let re = regex!(r"(?P[^,\s]+),\s+(\S+)"); /// let result = re.replace("Springsteen, Bruce", NoExpand("$2 $last")); - /// assert_eq!(result.as_slice(), "$2 $last"); + /// assert_eq!(result, "$2 $last"); /// # } /// ``` pub fn replace(&self, text: &str, rep: R) -> String { @@ -505,19 +505,19 @@ impl Regex { } let (s, e) = cap.pos(0).unwrap(); // captures only reports matches - new.push_str(text.slice(last_match, s)); - new.push_str(rep.reg_replace(&cap).as_slice()); + new.push_str(text[last_match..s]); + new.push_str(rep.reg_replace(&cap)[]); last_match = e; } - new.push_str(text.slice(last_match, text.len())); + new.push_str(text[last_match..text.len()]); return new; } /// Returns the original string of this regex. pub fn as_str<'a>(&'a self) -> &'a str { match *self { - Dynamic(ExDynamic { ref original, .. 
}) => original.as_slice(), - Native(ExNative { ref original, .. }) => original.as_slice(), + Dynamic(ExDynamic { ref original, .. }) => original[], + Native(ExNative { ref original, .. }) => original[], } } @@ -540,8 +540,8 @@ impl Regex { } pub enum NamesIter<'a> { - NamesIterNative(::std::slice::Items<'a, Option<&'static str>>), - NamesIterDynamic(::std::slice::Items<'a, Option>) + NamesIterNative(::std::slice::Iter<'a, Option<&'static str>>), + NamesIterDynamic(::std::slice::Iter<'a, Option>) } impl<'a> Iterator> for NamesIter<'a> { @@ -608,13 +608,13 @@ impl<'r, 't> Iterator<&'t str> for RegexSplits<'r, 't> { if self.last >= text.len() { None } else { - let s = text.slice(self.last, text.len()); + let s = text[self.last..text.len()]; self.last = text.len(); Some(s) } } Some((s, e)) => { - let matched = text.slice(self.last, s); + let matched = text[self.last..s]; self.last = e; Some(matched) } @@ -642,7 +642,7 @@ impl<'r, 't> Iterator<&'t str> for RegexSplitsN<'r, 't> { } else { self.cur += 1; if self.cur >= self.limit { - Some(text.slice(self.splits.last, text.len())) + Some(text[self.splits.last..text.len()]) } else { self.splits.next() } @@ -769,13 +769,13 @@ impl<'t> Captures<'t> { let pre = refs.at(1).unwrap_or(""); let name = refs.at(2).unwrap_or(""); format!("{}{}", pre, - match from_str::(name.as_slice()) { + match name.parse::() { None => self.name(name).unwrap_or("").to_string(), Some(i) => self.at(i).unwrap_or("").to_string(), }) }); let re = Regex::new(r"\$\$").unwrap(); - re.replace_all(text.as_slice(), NoExpand("$")) + re.replace_all(text[], NoExpand("$")) } /// Returns the number of captured groups. diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 90e9973c3f302..4647c92e3d1e8 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -22,7 +22,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(default_type_params, globs, import_shadowing, macro_rules, phase, quote)] +#![feature(default_type_params, globs, macro_rules, phase, quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] #![feature(unboxed_closures)] @@ -40,6 +40,8 @@ extern crate collections; #[phase(plugin, link)] extern crate log; #[phase(plugin, link)] extern crate syntax; +extern crate "serialize" as rustc_serialize; // used by deriving + #[cfg(test)] extern crate test; @@ -90,7 +92,6 @@ pub mod middle { pub mod reachable; pub mod region; pub mod recursion_limit; - pub mod resolve; pub mod resolve_lifetime; pub mod stability; pub mod subst; @@ -116,6 +117,7 @@ pub mod util { pub mod ppaux; pub mod nodemap; pub mod snapshot_vec; + pub mod lev_distance; } pub mod lib { diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 88b12aa5660c9..0fd69ea25bc0d 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -37,7 +37,7 @@ use util::nodemap::{FnvHashMap, NodeSet}; use lint::{Context, LintPass, LintArray}; use std::{cmp, slice}; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::num::SignedInt; use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64}; use syntax::{abi, ast, ast_map}; @@ -250,10 +250,12 @@ impl LintPass for TypeLimits { let (min, max) = float_ty_range(t); let lit_val: f64 = match lit.node { ast::LitFloat(ref v, _) | - ast::LitFloatUnsuffixed(ref v) => match from_str(v.get()) { - Some(f) => f, - None => return - }, + ast::LitFloatUnsuffixed(ref v) => { 
+ match v.parse() { + Some(f) => f, + None => return + } + } _ => panic!() }; if lit_val < min || lit_val > max { @@ -507,7 +509,7 @@ impl BoxPointers { if n_uniq > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses owned (Box type) pointers: {}", s); - cx.span_lint(BOX_POINTERS, span, m.as_slice()); + cx.span_lint(BOX_POINTERS, span, m[]); } } } @@ -587,7 +589,7 @@ impl LintPass for RawPointerDeriving { } fn check_item(&mut self, cx: &Context, item: &ast::Item) { - if !attr::contains_name(item.attrs.as_slice(), "automatically_derived") { + if !attr::contains_name(item.attrs[], "automatically_derived") { return } let did = match item.node { @@ -766,11 +768,11 @@ impl LintPass for UnusedResults { ty::ty_enum(did, _) => { if ast_util::is_local(did) { if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) { - warned |= check_must_use(cx, it.attrs.as_slice(), s.span); + warned |= check_must_use(cx, it.attrs[], s.span); } } else { csearch::get_item_attrs(&cx.sess().cstore, did, |attrs| { - warned |= check_must_use(cx, attrs.as_slice(), s.span); + warned |= check_must_use(cx, attrs[], s.span); }); } } @@ -792,7 +794,7 @@ impl LintPass for UnusedResults { msg.push_str(s.get()); } } - cx.span_lint(UNUSED_MUST_USE, sp, msg.as_slice()); + cx.span_lint(UNUSED_MUST_USE, sp, msg[]); return true; } } @@ -838,7 +840,7 @@ impl NonCamelCaseTypes { } else { format!("{} `{}` should have a camel case name such as `{}`", sort, s, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.as_slice()); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, m[]); } } } @@ -978,7 +980,7 @@ impl NonSnakeCase { if !is_snake_case(ident) { cx.span_lint(NON_SNAKE_CASE, span, format!("{} `{}` should have a snake case name such as `{}`", - sort, s, to_snake_case(s.get())).as_slice()); + sort, s, to_snake_case(s.get()))[]); } } } @@ -1065,7 +1067,7 @@ impl LintPass for NonUpperCaseGlobals { format!("static constant `{}` should have an uppercase name \ such as `{}`", s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::().as_slice()).as_slice()); + .collect::()[])[]); } } _ => {} @@ -1082,7 +1084,7 @@ impl LintPass for NonUpperCaseGlobals { format!("static constant in pattern `{}` should have an uppercase \ name such as `{}`", s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::().as_slice()).as_slice()); + .collect::()[])[]); } } _ => {} @@ -1107,7 +1109,7 @@ impl UnusedParens { if !necessary { cx.span_lint(UNUSED_PARENS, value.span, format!("unnecessary parentheses around {}", - msg).as_slice()) + msg)[]) } } @@ -1157,9 +1159,9 @@ impl LintPass for UnusedParens { ast::ExprIf(ref cond, _, _) => (cond, "`if` condition", true), ast::ExprWhile(ref cond, _, _) => (cond, "`while` condition", true), ast::ExprMatch(ref head, _, source) => match source { - ast::MatchNormal => (head, "`match` head expression", true), - ast::MatchIfLetDesugar => (head, "`if let` head expression", true), - ast::MatchWhileLetDesugar => (head, "`while let` head expression", true), + ast::MatchSource::Normal => (head, "`match` head expression", true), + ast::MatchSource::IfLetDesugar { .. 
} => (head, "`if let` head expression", true), + ast::MatchSource::WhileLetDesugar => (head, "`while let` head expression", true), }, ast::ExprRet(Some(ref value)) => (value, "`return` value", false), ast::ExprAssign(_, ref value) => (value, "assigned value", false), @@ -1209,7 +1211,7 @@ impl LintPass for UnusedImportBraces { let m = format!("braces around {} is unnecessary", token::get_ident(*name).get()); cx.span_lint(UNUSED_IMPORT_BRACES, view_item.span, - m.as_slice()); + m[]); }, _ => () } @@ -1248,7 +1250,7 @@ impl LintPass for NonShorthandFieldPatterns { if ident.node.as_str() == fieldpat.node.ident.as_str() { cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, format!("the `{}:` in this pattern is redundant and can \ - be removed", ident.node.as_str()).as_slice()) + be removed", ident.node.as_str())[]) } } } @@ -1352,7 +1354,7 @@ impl LintPass for UnusedMut { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprMatch(_, ref arms, _) = e.node { for a in arms.iter() { - self.check_unused_mut_pat(cx, a.pats.as_slice()) + self.check_unused_mut_pat(cx, a.pats[]) } } } @@ -1473,7 +1475,7 @@ impl MissingDoc { }); if !has_doc { cx.span_lint(MISSING_DOCS, sp, - format!("missing documentation for {}", desc).as_slice()); + format!("missing documentation for {}", desc)[]); } } } @@ -1487,7 +1489,7 @@ impl LintPass for MissingDoc { let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| { attr.check_name("doc") && match attr.meta_item_list() { None => false, - Some(l) => attr::contains_name(l.as_slice(), "hidden"), + Some(l) => attr::contains_name(l[], "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); @@ -1509,7 +1511,7 @@ impl LintPass for MissingDoc { } fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) { - self.check_missing_docs_attrs(cx, None, krate.attrs.as_slice(), + self.check_missing_docs_attrs(cx, None, krate.attrs[], krate.span, "crate"); } @@ -1523,7 +1525,7 @@ impl LintPass for MissingDoc { ast::ItemTy(..) => "a type alias", _ => return }; - self.check_missing_docs_attrs(cx, Some(it.id), it.attrs.as_slice(), + self.check_missing_docs_attrs(cx, Some(it.id), it.attrs[], it.span, desc); } @@ -1536,13 +1538,13 @@ impl LintPass for MissingDoc { // Otherwise, doc according to privacy. This will also check // doc for default methods defined on traits. 
- self.check_missing_docs_attrs(cx, Some(m.id), m.attrs.as_slice(), + self.check_missing_docs_attrs(cx, Some(m.id), m.attrs[], m.span, "a method"); } } fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) { - self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs.as_slice(), + self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs[], tm.span, "a type method"); } @@ -1552,14 +1554,14 @@ impl LintPass for MissingDoc { let cur_struct_def = *self.struct_def_stack.last() .expect("empty struct_def_stack"); self.check_missing_docs_attrs(cx, Some(cur_struct_def), - sf.node.attrs.as_slice(), sf.span, + sf.node.attrs[], sf.span, "a struct field") } } } fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) { - self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs.as_slice(), + self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs[], v.span, "a variant"); assert!(!self.in_variant); self.in_variant = true; @@ -1675,7 +1677,7 @@ impl Stability { _ => format!("use of {} item", label) }; - cx.span_lint(lint, span, msg.as_slice()); + cx.span_lint(lint, span, msg[]); } fn is_internal(&self, cx: &Context, span: Span) -> bool { diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index d8d9d653e62f7..ffae485364a86 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -104,7 +104,7 @@ impl LintStore { } pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] { - self.lints.as_slice() + self.lints[] } pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec, bool)> { @@ -124,11 +124,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg.as_slice()), - (Some(sess), false) => sess.bug(msg.as_slice()), + (None, _) => early_error(msg[]), + (Some(sess), false) => sess.bug(msg[]), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(msg.as_slice()), + (Some(sess), true) => sess.err(msg[]), } } @@ -149,11 +149,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg.as_slice()), - (Some(sess), false) => sess.bug(msg.as_slice()), + (None, _) => early_error(msg[]), + (Some(sess), false) => sess.bug(msg[]), // A duplicate name from a plugin is a user error. 
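The `Occupied`/`Vacant` import changes in the neighboring files (`lint/builtin.rs`, `metadata/creader.rs`, `metadata/loader.rs`) reflect those variants moving into the `hash_map::Entry` enum; only the `use` path changes, and the match arms that destructure `Occupied(..)` and `Vacant(..)` stay as they are. A sketch of just the import change, nothing else assumed:

    // Before:
    // use std::collections::hash_map::{Occupied, Vacant};
    // After:
    use std::collections::hash_map::Entry::{Occupied, Vacant};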
- (Some(sess), true) => sess.err(msg.as_slice()), + (Some(sess), true) => sess.err(msg[]), } } } @@ -260,8 +260,8 @@ impl LintStore { let warning = format!("lint {} has been renamed to {}", lint_name, new_name); match span { - Some(span) => sess.span_warn(span, warning.as_slice()), - None => sess.warn(warning.as_slice()), + Some(span) => sess.span_warn(span, warning[]), + None => sess.warn(warning[]), }; Some(lint_id) } @@ -271,13 +271,13 @@ impl LintStore { pub fn process_command_line(&mut self, sess: &Session) { for &(ref lint_name, level) in sess.opts.lint_opts.iter() { - match self.find_lint(lint_name.as_slice(), sess, None) { + match self.find_lint(lint_name[], sess, None) { Some(lint_id) => self.set_level(lint_id, (level, CommandLine)), None => { match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone())) .collect::>>() - .get(lint_name.as_slice()) { + .get(lint_name[]) { Some(v) => { v.iter() .map(|lint_id: &LintId| @@ -285,7 +285,7 @@ impl LintStore { .collect::>(); } None => sess.err(format!("unknown {} flag: {}", - level.as_str(), lint_name).as_slice()), + level.as_str(), lint_name)[]), } } } @@ -396,10 +396,10 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint, if level == Forbid { level = Deny; } match (level, span) { - (Warn, Some(sp)) => sess.span_warn(sp, msg.as_slice()), - (Warn, None) => sess.warn(msg.as_slice()), - (Deny, Some(sp)) => sess.span_err(sp, msg.as_slice()), - (Deny, None) => sess.err(msg.as_slice()), + (Warn, Some(sp)) => sess.span_warn(sp, msg[]), + (Warn, None) => sess.warn(msg[]), + (Deny, Some(sp)) => sess.span_err(sp, msg[]), + (Deny, None) => sess.err(msg[]), _ => sess.bug("impossible level in raw_emit_lint"), } @@ -492,7 +492,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { None => { self.span_lint(builtin::UNKNOWN_LINTS, span, format!("unknown `{}` attribute: `{}`", - level.as_str(), lint_name).as_slice()); + level.as_str(), lint_name)[]); continue; } } @@ -508,7 +508,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { self.tcx.sess.span_err(span, format!("{}({}) overruled by outer forbid({})", level.as_str(), lint_name, - lint_name).as_slice()); + lint_name)[]); } else if now != level { let src = self.lints.get_level_source(lint_id).1; self.level_stack.push((lint_id, (now, src))); @@ -543,7 +543,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { fn visit_item(&mut self, it: &ast::Item) { - self.with_lint_attrs(it.attrs.as_slice(), |cx| { + self.with_lint_attrs(it.attrs[], |cx| { run_lints!(cx, check_item, it); cx.visit_ids(|v| v.visit_item(it)); visit::walk_item(cx, it); @@ -551,14 +551,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_foreign_item(&mut self, it: &ast::ForeignItem) { - self.with_lint_attrs(it.attrs.as_slice(), |cx| { + self.with_lint_attrs(it.attrs[], |cx| { run_lints!(cx, check_foreign_item, it); visit::walk_foreign_item(cx, it); }) } fn visit_view_item(&mut self, i: &ast::ViewItem) { - self.with_lint_attrs(i.attrs.as_slice(), |cx| { + self.with_lint_attrs(i.attrs[], |cx| { run_lints!(cx, check_view_item, i); cx.visit_ids(|v| v.visit_view_item(i)); visit::walk_view_item(cx, i); @@ -584,7 +584,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { body: &'v ast::Block, span: Span, id: ast::NodeId) { match fk { visit::FkMethod(_, _, m) => { - self.with_lint_attrs(m.attrs.as_slice(), |cx| { + self.with_lint_attrs(m.attrs[], |cx| { run_lints!(cx, check_fn, fk, decl, body, span, id); cx.visit_ids(|v| { v.visit_fn(fk, decl, body, span, id); @@ -600,7 +600,7 @@ impl<'a, 
'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_ty_method(&mut self, t: &ast::TypeMethod) { - self.with_lint_attrs(t.attrs.as_slice(), |cx| { + self.with_lint_attrs(t.attrs[], |cx| { run_lints!(cx, check_ty_method, t); visit::walk_ty_method(cx, t); }) @@ -617,14 +617,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_struct_field(&mut self, s: &ast::StructField) { - self.with_lint_attrs(s.node.attrs.as_slice(), |cx| { + self.with_lint_attrs(s.node.attrs[], |cx| { run_lints!(cx, check_struct_field, s); visit::walk_struct_field(cx, s); }) } fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) { - self.with_lint_attrs(v.node.attrs.as_slice(), |cx| { + self.with_lint_attrs(v.node.attrs[], |cx| { run_lints!(cx, check_variant, v, g); visit::walk_variant(cx, v, g); run_lints!(cx, check_variant_post, v, g); @@ -718,7 +718,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> { None => {} Some(lints) => { for (lint_id, span, msg) in lints.into_iter() { - self.span_lint(lint_id.lint, span, msg.as_slice()) + self.span_lint(lint_id.lint, span, msg[]) } } } @@ -764,7 +764,7 @@ pub fn check_crate(tcx: &ty::ctxt, let mut cx = Context::new(tcx, krate, exported_items); // Visit the whole crate. - cx.with_lint_attrs(krate.attrs.as_slice(), |cx| { + cx.with_lint_attrs(krate.attrs[], |cx| { cx.visit_id(ast::CRATE_NODE_ID); cx.visit_ids(|v| { v.visited_outermost = true; @@ -784,7 +784,7 @@ pub fn check_crate(tcx: &ty::ctxt, for &(lint, span, ref msg) in v.iter() { tcx.sess.span_bug(span, format!("unprocessed lint {} at {}: {}", - lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice()) + lint.as_str(), tcx.map.node_to_string(*id), *msg)[]) } } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 9e87153e64a15..98b57511957f6 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -23,7 +23,7 @@ use plugin::load::PluginMetadata; use util::nodemap::FnvHashMap; use std::rc::Rc; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::ast; use syntax::abi; use syntax::attr; @@ -95,11 +95,11 @@ fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) { for (name, dupes) in map.into_iter() { if dupes.len() == 1 { continue } diag.handler().warn( - format!("using multiple versions of crate `{}`", name).as_slice()); + format!("using multiple versions of crate `{}`", name)[]); for dupe in dupes.into_iter() { let data = cstore.get_crate_data(dupe); diag.span_note(data.span, "used here"); - loader::note_crate_name(diag, data.name().as_slice()); + loader::note_crate_name(diag, data.name()[]); } } } @@ -117,7 +117,7 @@ fn should_link(i: &ast::ViewItem) -> bool { i.attrs.iter().all(|attr| { attr.name().get() != "phase" || attr.meta_item_list().map_or(false, |phases| { - attr::contains_name(phases.as_slice(), "link") + attr::contains_name(phases[], "link") }) }) } @@ -131,8 +131,8 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { Some(info) => { let (cnum, _, _) = resolve_crate(e, &None, - info.ident.as_slice(), - info.name.as_slice(), + info.ident[], + info.name[], None, i.span); e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum); @@ -157,7 +157,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option { let name = match *path_opt { Some((ref path_str, _)) => { let name = path_str.get().to_string(); - validate_crate_name(Some(e.sess), name.as_slice(), + validate_crate_name(Some(e.sess), name[], Some(i.span)); name } 
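Most of the churn in this file and the ones that follow is the same mechanical rewrite: `expr.as_slice()` becomes the full-range index `expr[]`, and `expr.slice(a, b)` becomes `expr[a..b]`, both relying on the `slicing_syntax` feature that `librustc/lib.rs` keeps enabled. A minimal sketch of the two forms on a hypothetical `String`:

    let owned: String = "librustc".to_string();
    let whole: &str = owned[];                // was: owned.as_slice()
    let tail:  &str = owned[3..owned.len()];  // was: owned.slice(3, owned.len())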
@@ -188,7 +188,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option) { for c in s.chars() { if c.is_alphanumeric() { continue } if c == '_' || c == '-' { continue } - err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice()); + err(format!("invalid character `{}` in crate name: `{}`", c, s)[]); } match sess { Some(sess) => sess.abort_if_errors(), @@ -246,7 +246,7 @@ fn visit_item(e: &Env, i: &ast::Item) { } else { e.sess.span_err(m.span, format!("unknown kind: `{}`", - k).as_slice()); + k)[]); cstore::NativeUnknown } } @@ -327,7 +327,7 @@ fn existing_match(e: &Env, name: &str, match e.sess.opts.externs.get(name) { Some(locs) => { let found = locs.iter().any(|l| { - let l = fs::realpath(&Path::new(l.as_slice())).ok(); + let l = fs::realpath(&Path::new(l[])).ok(); l == source.dylib || l == source.rlib }); if found { @@ -405,7 +405,7 @@ fn resolve_crate<'a>(e: &mut Env, crate_name: name, hash: hash.map(|a| &*a), filesearch: e.sess.target_filesearch(), - triple: e.sess.opts.target_triple.as_slice(), + triple: e.sess.opts.target_triple[], root: root, rejected_via_hash: vec!(), rejected_via_triple: vec!(), @@ -431,8 +431,8 @@ fn resolve_crate_deps(e: &mut Env, decoder::get_crate_deps(cdata).iter().map(|dep| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); let (local_cnum, _, _) = resolve_crate(e, root, - dep.name.as_slice(), - dep.name.as_slice(), + dep.name[], + dep.name[], Some(&dep.hash), span); (dep.cnum, local_cnum) @@ -455,14 +455,14 @@ impl<'a> PluginMetadataReader<'a> { pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata { let info = extract_crate_info(&self.env, krate).unwrap(); - let target_triple = self.env.sess.opts.target_triple.as_slice(); + let target_triple = self.env.sess.opts.target_triple[]; let is_cross = target_triple != config::host_triple(); let mut should_link = info.should_link && !is_cross; let mut load_ctxt = loader::Context { sess: self.env.sess, span: krate.span, - ident: info.ident.as_slice(), - crate_name: info.name.as_slice(), + ident: info.ident[], + crate_name: info.name[], hash: None, filesearch: self.env.sess.host_filesearch(), triple: config::host_triple(), @@ -483,7 +483,7 @@ impl<'a> PluginMetadataReader<'a> { let message = format!("crate `{}` contains a plugin_registrar fn but \ only a version for triple `{}` could be found (need {})", info.ident, target_triple, config::host_triple()); - self.env.sess.span_err(krate.span, message.as_slice()); + self.env.sess.span_err(krate.span, message[]); // need to abort now because the syntax expansion // code will shortly attempt to load and execute // code from the found library. @@ -502,7 +502,7 @@ impl<'a> PluginMetadataReader<'a> { let message = format!("plugin crate `{}` only found in rlib format, \ but must be available in dylib format", info.ident); - self.env.sess.span_err(krate.span, message.as_slice()); + self.env.sess.span_err(krate.span, message[]); // No need to abort because the loading code will just ignore this // empty dylib. 
} @@ -511,11 +511,11 @@ impl<'a> PluginMetadataReader<'a> { macros: macros, registrar_symbol: registrar, }; - if should_link && existing_match(&self.env, info.name.as_slice(), + if should_link && existing_match(&self.env, info.name[], None).is_none() { // register crate now to avoid double-reading metadata - register_crate(&mut self.env, &None, info.ident.as_slice(), - info.name.as_slice(), krate.span, library); + register_crate(&mut self.env, &None, info.ident[], + info.name[], krate.span, library); } pc } diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index a474af7c6e1ff..13342bf82cfea 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -19,7 +19,6 @@ use metadata::cstore; use metadata::decoder; use middle::def; use middle::lang_items; -use middle::resolve; use middle::ty; use rbml; @@ -96,7 +95,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec // FIXME #1920: This path is not always correct if the crate is not linked // into the root namespace. - let mut r = vec![ast_map::PathMod(token::intern(cdata.name.as_slice()))]; + let mut r = vec![ast_map::PathMod(token::intern(cdata.name[]))]; r.push_all(path.as_slice()); r } @@ -148,7 +147,7 @@ pub fn get_impl_or_trait_item<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId) } pub fn get_trait_item_name_and_kind(cstore: &cstore::CStore, def: ast::DefId) - -> (ast::Name, resolve::TraitItemKind) { + -> (ast::Name, def::TraitItemKind) { let cdata = cstore.get_crate_data(def.krate); decoder::get_trait_item_name_and_kind(cstore.intr.clone(), &*cdata, diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index b89c5dbcd0885..f05607a999b66 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -27,7 +27,6 @@ use metadata::tydecode::{parse_ty_data, parse_region_data, parse_def_id, parse_predicate_data}; use middle::def; use middle::lang_items; -use middle::resolve::{TraitItemKind, TypeTraitItemKind}; use middle::subst; use middle::ty::{ImplContainer, TraitContainer}; use middle::ty::{mod, Ty}; @@ -222,7 +221,7 @@ fn each_reexport(d: rbml::Doc, f: F) -> bool where fn variant_disr_val(d: rbml::Doc) -> Option { reader::maybe_get_doc(d, tag_disr_val).and_then(|val_doc| { reader::with_doc_data(val_doc, |data| { - str::from_utf8(data).and_then(from_str) + str::from_utf8(data).ok().and_then(|s| s.parse()) }) }) } @@ -701,7 +700,7 @@ pub fn get_enum_variants<'tcx>(intr: Rc, cdata: Cmd, id: ast::Nod item, tcx, cdata); let name = item_name(&*intr, item); let (ctor_ty, arg_tys, arg_names) = match ctor_ty.sty { - ty::ty_bare_fn(ref f) => + ty::ty_bare_fn(_, ref f) => (Some(ctor_ty), f.sig.0.inputs.clone(), None), _ => { // Nullary or struct enum variant. 
let mut arg_names = Vec::new(); @@ -785,15 +784,15 @@ pub fn get_impl_items(cdata: Cmd, impl_id: ast::NodeId) pub fn get_trait_item_name_and_kind(intr: Rc, cdata: Cmd, id: ast::NodeId) - -> (ast::Name, TraitItemKind) { + -> (ast::Name, def::TraitItemKind) { let doc = lookup_item(id, cdata.data()); let name = item_name(&*intr, doc); match item_sort(doc) { 'r' | 'p' => { let explicit_self = get_explicit_self(doc); - (name, TraitItemKind::from_explicit_self_category(explicit_self)) + (name, def::TraitItemKind::from_explicit_self_category(explicit_self)) } - 't' => (name, TypeTraitItemKind), + 't' => (name, def::TypeTraitItemKind), c => { panic!("get_trait_item_name_and_kind(): unknown trait item kind \ in metadata: `{}`", c) @@ -1161,7 +1160,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec { } reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { let name = docstr(depdoc, tag_crate_dep_crate_name); - let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).as_slice()); + let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash)[]); deps.push(CrateDep { cnum: crate_num, name: name, @@ -1346,7 +1345,7 @@ pub fn get_dylib_dependency_formats(cdata: Cmd) if spec.len() == 0 { continue } let cnum = spec.split(':').nth(0).unwrap(); let link = spec.split(':').nth(1).unwrap(); - let cnum = from_str(cnum).unwrap(); + let cnum = cnum.parse().unwrap(); let cnum = match cdata.cnum_map.get(&cnum) { Some(&n) => n, None => panic!("didn't find a crate in the cnum_map") diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index cc383aa217a7b..6782b3a74813e 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -21,10 +21,10 @@ use metadata::common::*; use metadata::cstore; use metadata::decoder; use metadata::tyencode; +use middle::def; use middle::ty::{lookup_item_type}; use middle::ty::{mod, Ty}; use middle::stability; -use middle; use util::nodemap::{FnvHashMap, NodeMap, NodeSet}; use serialize::Encodable; @@ -66,7 +66,7 @@ pub type EncodeInlinedItem<'a> = |ecx: &EncodeContext, pub struct EncodeParams<'a, 'tcx: 'a> { pub diag: &'a SpanHandler, pub tcx: &'a ty::ctxt<'tcx>, - pub reexports2: &'a middle::resolve::ExportMap2, + pub reexports: &'a def::ExportMap, pub item_symbols: &'a RefCell>, pub link_meta: &'a LinkMeta, pub cstore: &'a cstore::CStore, @@ -77,7 +77,7 @@ pub struct EncodeParams<'a, 'tcx: 'a> { pub struct EncodeContext<'a, 'tcx: 'a> { pub diag: &'a SpanHandler, pub tcx: &'a ty::ctxt<'tcx>, - pub reexports2: &'a middle::resolve::ExportMap2, + pub reexports: &'a def::ExportMap, pub item_symbols: &'a RefCell>, pub link_meta: &'a LinkMeta, pub cstore: &'a cstore::CStore, @@ -95,7 +95,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) { } pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) { - rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).as_slice()); + rbml_w.wr_tagged_str(tag_def_id, def_to_string(id)[]); } #[deriving(Clone)] @@ -154,7 +154,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) { rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(s.as_slice()); + rbml_w.wr_str(s[]); rbml_w.end_tag(); } @@ -264,7 +264,7 @@ fn encode_symbol(ecx: &EncodeContext, } None => { ecx.diag.handler().bug( - format!("encode_symbol: id not found {}", id).as_slice()); + format!("encode_symbol: id not found {}", id)[]); } } rbml_w.end_tag(); @@ -332,8 +332,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext, encode_name(rbml_w, variant.node.name.name); encode_parent_item(rbml_w, local_def(id)); 
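The encoder changes here follow from `middle::resolve` being removed from this crate (see the `lib.rs` hunk near the top of the patch): the export map and trait-item kinds it used to provide are now reached through `middle::def`, and the `reexports2` field is renamed to `reexports`. A sketch of the paths involved, assuming the re-exports shown in the surrounding hunks:

    // Before:
    // use middle::resolve;   // for resolve::ExportMap2, resolve::Export2, resolve::TraitItemKind
    // After:
    use middle::def;
    use middle::def::{Export, ExportMap, TraitItemKind};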
encode_visibility(rbml_w, variant.node.vis); - encode_attributes(rbml_w, variant.node.attrs.as_slice()); - encode_repr_attrs(rbml_w, ecx, variant.node.attrs.as_slice()); + encode_attributes(rbml_w, variant.node.attrs[]); + encode_repr_attrs(rbml_w, ecx, variant.node.attrs[]); let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id)); encode_stability(rbml_w, stab); @@ -344,9 +344,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext, let fields = ty::lookup_struct_fields(ecx.tcx, def_id); let idx = encode_info_for_struct(ecx, rbml_w, - fields.as_slice(), + fields[], index); - encode_struct_fields(rbml_w, fields.as_slice(), def_id); + encode_struct_fields(rbml_w, fields[], def_id); encode_index(rbml_w, idx, write_i64); } } @@ -379,26 +379,26 @@ fn encode_path>(rbml_w: &mut Encoder, path: PI) { } fn encode_reexported_static_method(rbml_w: &mut Encoder, - exp: &middle::resolve::Export2, + exp: &def::Export, method_def_id: DefId, method_name: ast::Name) { debug!("(encode reexported static method) {}::{}", exp.name, token::get_name(method_name)); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(method_def_id).as_slice()); + rbml_w.wr_str(def_to_string(method_def_id)[]); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); rbml_w.wr_str(format!("{}::{}", exp.name, - token::get_name(method_name)).as_slice()); + token::get_name(method_name))[]); rbml_w.end_tag(); rbml_w.end_tag(); } fn encode_reexported_static_base_methods(ecx: &EncodeContext, rbml_w: &mut Encoder, - exp: &middle::resolve::Export2) + exp: &def::Export) -> bool { let impl_items = ecx.tcx.impl_items.borrow(); match ecx.tcx.inherent_impls.borrow().get(&exp.def_id) { @@ -428,7 +428,7 @@ fn encode_reexported_static_base_methods(ecx: &EncodeContext, fn encode_reexported_static_trait_methods(ecx: &EncodeContext, rbml_w: &mut Encoder, - exp: &middle::resolve::Export2) + exp: &def::Export) -> bool { match ecx.tcx.trait_items_cache.borrow().get(&exp.def_id) { Some(trait_items) => { @@ -449,10 +449,8 @@ fn encode_reexported_static_trait_methods(ecx: &EncodeContext, fn encode_reexported_static_methods(ecx: &EncodeContext, rbml_w: &mut Encoder, mod_path: PathElems, - exp: &middle::resolve::Export2) { + exp: &def::Export) { if let Some(ast_map::NodeItem(item)) = ecx.tcx.map.find(exp.def_id.node) { - let original_name = token::get_ident(item.ident); - let path_differs = ecx.tcx.map.with_path(exp.def_id.node, |path| { let (mut a, mut b) = (path, mod_path.clone()); loop { @@ -474,16 +472,16 @@ fn encode_reexported_static_methods(ecx: &EncodeContext, // encoded metadata for static methods relative to Bar, // but not yet for Foo. 
// - if path_differs || original_name.get() != exp.name { + if path_differs || item.ident.name != exp.name { if !encode_reexported_static_base_methods(ecx, rbml_w, exp) { if encode_reexported_static_trait_methods(ecx, rbml_w, exp) { debug!("(encode reexported static methods) {} [trait]", - original_name); + item.ident.name); } } else { debug!("(encode reexported static methods) {} [base]", - original_name); + item.ident.name); } } } @@ -519,7 +517,7 @@ fn encode_reexports(ecx: &EncodeContext, id: NodeId, path: PathElems) { debug!("(encoding info for module) encoding reexports for {}", id); - match ecx.reexports2.get(&id) { + match ecx.reexports.get(&id) { Some(ref exports) => { debug!("(encoding info for module) found reexports for {}", id); for exp in exports.iter() { @@ -531,10 +529,10 @@ fn encode_reexports(ecx: &EncodeContext, id); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(exp.def_id).as_slice()); + rbml_w.wr_str(def_to_string(exp.def_id)[]); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); - rbml_w.wr_str(exp.name.as_slice()); + rbml_w.wr_str(exp.name.as_str()); rbml_w.end_tag(); rbml_w.end_tag(); encode_reexported_static_methods(ecx, rbml_w, path.clone(), exp); @@ -564,13 +562,13 @@ fn encode_info_for_mod(ecx: &EncodeContext, // Encode info about all the module children. for item in md.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(item.id))[]); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id)).as_slice()); + auxiliary_node_id))[]); rbml_w.end_tag(); true }); @@ -582,7 +580,7 @@ fn encode_info_for_mod(ecx: &EncodeContext, did, ecx.tcx.map.node_to_string(did)); rbml_w.start_tag(tag_mod_impl); - rbml_w.wr_str(def_to_string(local_def(did)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(did))[]); rbml_w.end_tag(); } } @@ -617,7 +615,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: ast::Visibility) { ast::Public => 'y', ast::Inherited => 'i', }; - rbml_w.wr_str(ch.to_string().as_slice()); + rbml_w.wr_str(ch.to_string()[]); rbml_w.end_tag(); } @@ -629,7 +627,7 @@ fn encode_unboxed_closure_kind(rbml_w: &mut Encoder, ty::FnMutUnboxedClosureKind => 'm', ty::FnOnceUnboxedClosureKind => 'o', }; - rbml_w.wr_str(ch.to_string().as_slice()); + rbml_w.wr_str(ch.to_string()[]); rbml_w.end_tag(); } @@ -790,7 +788,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder, rbml_w.end_tag(); rbml_w.wr_tagged_str(tag_region_param_def_def_id, - def_to_string(param.def_id).as_slice()); + def_to_string(param.def_id)[]); rbml_w.wr_tagged_u64(tag_region_param_def_space, param.space.to_uint() as u64); @@ -866,9 +864,9 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_path(rbml_w, impl_path.chain(Some(elem).into_iter())); match ast_item_opt { Some(&ast::MethodImplItem(ref ast_method)) => { - encode_attributes(rbml_w, ast_method.attrs.as_slice()); + encode_attributes(rbml_w, ast_method.attrs[]); let any_types = !pty.generics.types.is_empty(); - if any_types || is_default_impl || should_inline(ast_method.attrs.as_slice()) { + if any_types || is_default_impl || should_inline(ast_method.attrs[]) { encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id), ast_item_opt.unwrap())); } @@ -911,7 +909,7 @@ fn 
encode_info_for_associated_type(ecx: &EncodeContext, match typedef_opt { None => {} Some(typedef) => { - encode_attributes(rbml_w, typedef.attrs.as_slice()); + encode_attributes(rbml_w, typedef.attrs[]); encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx, typedef.id)); } @@ -1045,7 +1043,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_path(rbml_w, path); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); - encode_attributes(rbml_w, item.attrs.as_slice()); + encode_attributes(rbml_w, item.attrs[]); rbml_w.end_tag(); } ast::ItemConst(_, _) => { @@ -1071,8 +1069,8 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); encode_path(rbml_w, path); - encode_attributes(rbml_w, item.attrs.as_slice()); - if tps_len > 0u || should_inline(item.attrs.as_slice()) { + encode_attributes(rbml_w, item.attrs[]); + if tps_len > 0u || should_inline(item.attrs[]) { encode_inlined_item(ecx, rbml_w, IIItemRef(item)); } if tps_len == 0 { @@ -1088,7 +1086,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_info_for_mod(ecx, rbml_w, m, - item.attrs.as_slice(), + item.attrs[], item.id, path, item.ident, @@ -1105,7 +1103,7 @@ fn encode_info_for_item(ecx: &EncodeContext, // Encode all the items in this module. for foreign_item in fm.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(foreign_item.id))[]); rbml_w.end_tag(); } encode_visibility(rbml_w, vis); @@ -1133,8 +1131,8 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_item_variances(rbml_w, ecx, item.id); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.as_slice()); - encode_repr_attrs(rbml_w, ecx, item.attrs.as_slice()); + encode_attributes(rbml_w, item.attrs[]); + encode_repr_attrs(rbml_w, ecx, item.attrs[]); for v in (*enum_definition).variants.iter() { encode_variant_id(rbml_w, local_def(v.node.id)); } @@ -1151,7 +1149,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_enum_variant_info(ecx, rbml_w, item.id, - (*enum_definition).variants.as_slice(), + (*enum_definition).variants[], index); } ast::ItemStruct(ref struct_def, _) => { @@ -1163,7 +1161,7 @@ fn encode_info_for_item(ecx: &EncodeContext, class itself */ let idx = encode_info_for_struct(ecx, rbml_w, - fields.as_slice(), + fields[], index); /* Index the class*/ @@ -1177,16 +1175,16 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_item_variances(rbml_w, ecx, item.id); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.as_slice()); + encode_attributes(rbml_w, item.attrs[]); encode_path(rbml_w, path.clone()); encode_stability(rbml_w, stab); encode_visibility(rbml_w, vis); - encode_repr_attrs(rbml_w, ecx, item.attrs.as_slice()); + encode_repr_attrs(rbml_w, ecx, item.attrs[]); /* Encode def_ids for each field and method for methods, write all the stuff get_trait_method needs to know*/ - encode_struct_fields(rbml_w, fields.as_slice(), def_id); + encode_struct_fields(rbml_w, fields[], def_id); encode_inlined_item(ecx, rbml_w, IIItemRef(item)); @@ -1218,7 +1216,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_family(rbml_w, 'i'); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.as_slice()); + encode_attributes(rbml_w, item.attrs[]); 
encode_unsafety(rbml_w, unsafety); match ty.node { ast::TyPath(ref path, _) if path.segments @@ -1321,7 +1319,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_generics(rbml_w, ecx, &trait_def.generics, tag_item_generics); encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs.as_slice()); + encode_attributes(rbml_w, item.attrs[]); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() { @@ -1339,7 +1337,7 @@ fn encode_info_for_item(ecx: &EncodeContext, rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(method_def_id.def_id()).as_slice()); + rbml_w.wr_str(def_to_string(method_def_id.def_id())[]); rbml_w.end_tag(); } encode_path(rbml_w, path.clone()); @@ -1424,14 +1422,14 @@ fn encode_info_for_item(ecx: &EncodeContext, }; match trait_item { &ast::RequiredMethod(ref m) => { - encode_attributes(rbml_w, m.attrs.as_slice()); + encode_attributes(rbml_w, m.attrs[]); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'r'); encode_method_argument_names(rbml_w, &*m.decl); } &ast::ProvidedMethod(ref m) => { - encode_attributes(rbml_w, m.attrs.as_slice()); + encode_attributes(rbml_w, m.attrs[]); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'p'); encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item)); @@ -1440,7 +1438,7 @@ fn encode_info_for_item(ecx: &EncodeContext, &ast::TypeTraitItem(ref associated_type) => { encode_attributes(rbml_w, - associated_type.attrs.as_slice()); + associated_type.attrs[]); encode_item_sort(rbml_w, 't'); } } @@ -1480,6 +1478,9 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext, if abi == abi::RustIntrinsic { encode_inlined_item(ecx, rbml_w, IIForeignRef(nitem)); } + encode_attributes(rbml_w, &*nitem.attrs); + let stab = stability::lookup(ecx.tcx, ast_util::local_def(nitem.id)); + encode_stability(rbml_w, stab); encode_symbol(ecx, rbml_w, nitem.id); } ast::ForeignItemStatic(_, mutbl) => { @@ -1490,6 +1491,9 @@ fn encode_info_for_foreign_item(ecx: &EncodeContext, } encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(ecx.tcx,local_def(nitem.id))); + encode_attributes(rbml_w, &*nitem.attrs); + let stab = stability::lookup(ecx.tcx, ast_util::local_def(nitem.id)); + encode_stability(rbml_w, stab); encode_symbol(ecx, rbml_w, nitem.id); encode_name(rbml_w, nitem.ident.name); } @@ -1798,7 +1802,7 @@ fn encode_macro_def(ecx: &EncodeContext, let def = ecx.tcx.sess.codemap().span_to_snippet(*span) .expect("Unable to find source for macro"); rbml_w.start_tag(tag_macro_def); - rbml_w.wr_str(def.as_slice()); + rbml_w.wr_str(def[]); rbml_w.end_tag(); } @@ -1845,7 +1849,7 @@ fn encode_struct_field_attrs(rbml_w: &mut Encoder, krate: &ast::Crate) { fn visit_struct_field(&mut self, field: &ast::StructField) { self.rbml_w.start_tag(tag_struct_field); self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id); - encode_attributes(self.rbml_w, field.node.attrs.as_slice()); + encode_attributes(self.rbml_w, field.node.attrs[]); self.rbml_w.end_tag(); } } @@ -1917,13 +1921,13 @@ fn encode_misc_info(ecx: &EncodeContext, rbml_w.start_tag(tag_misc_info_crate_items); for item in krate.module.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(item.id))[]); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); 
rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id)).as_slice()); + auxiliary_node_id))[]); rbml_w.end_tag(); true }); @@ -2071,7 +2075,7 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter, item_symbols, diag, tcx, - reexports2, + reexports, cstore, encode_inlined_item, link_meta, @@ -2081,7 +2085,7 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter, let ecx = EncodeContext { diag: diag, tcx: tcx, - reexports2: reexports2, + reexports: reexports, item_symbols: item_symbols, link_meta: link_meta, cstore: cstore, @@ -2092,17 +2096,17 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter, let mut rbml_w = writer::Encoder::new(wr); - encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.as_slice()); + encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name[]); encode_crate_triple(&mut rbml_w, tcx.sess .opts .target_triple - .as_slice()); + []); encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash); encode_dylib_dependency_formats(&mut rbml_w, &ecx); let mut i = rbml_w.writer.tell().unwrap(); - encode_attributes(&mut rbml_w, krate.attrs.as_slice()); + encode_attributes(&mut rbml_w, krate.attrs[]); stats.attr_bytes = rbml_w.writer.tell().unwrap() - i; i = rbml_w.writer.tell().unwrap(); diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index e83f69b1e318e..5f554eb9c1e5c 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -228,7 +228,7 @@ use util::fs; use std::c_str::ToCStr; use std::cmp; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::{HashMap, HashSet}; use std::io::fs::PathExtensions; use std::io; @@ -316,14 +316,14 @@ impl<'a> Context<'a> { &Some(ref r) => format!("{} which `{}` depends on", message, r.ident) }; - self.sess.span_err(self.span, message.as_slice()); + self.sess.span_err(self.span, message[]); if self.rejected_via_triple.len() > 0 { let mismatches = self.rejected_via_triple.iter(); for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}`, path #{}, triple {}: {}", - self.ident, i+1, got, path.display()).as_slice()); + self.ident, i+1, got, path.display())[]); } } if self.rejected_via_hash.len() > 0 { @@ -333,7 +333,7 @@ impl<'a> Context<'a> { for (i, &CrateMismatch{ ref path, .. 
}) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path {}{}: {}", - self.ident, "#", i+1, path.display()).as_slice()); + self.ident, "#", i+1, path.display())[]); } match self.root { &None => {} @@ -341,7 +341,7 @@ impl<'a> Context<'a> { for (i, path) in r.paths().iter().enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display()).as_slice()); + r.ident, i+1, path.display())[]); } } } @@ -387,7 +387,7 @@ impl<'a> Context<'a> { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(rlib_prefix.as_slice()) && + let (hash, rlib) = if file.starts_with(rlib_prefix[]) && file.ends_with(".rlib") { (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()), true) @@ -448,26 +448,26 @@ impl<'a> Context<'a> { _ => { self.sess.span_err(self.span, format!("multiple matching crates for `{}`", - self.crate_name).as_slice()); + self.crate_name)[]); self.sess.note("candidates:"); for lib in libraries.iter() { match lib.dylib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display()).as_slice()); + p.display())[]); } None => {} } match lib.rlib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display()).as_slice()); + p.display())[]); } None => {} } let data = lib.metadata.as_slice(); let name = decoder::get_crate_name(data); - note_crate_name(self.sess.diagnostic(), name.as_slice()); + note_crate_name(self.sess.diagnostic(), name[]); } None } @@ -521,11 +521,11 @@ impl<'a> Context<'a> { format!("multiple {} candidates for `{}` \ found", flavor, - self.crate_name).as_slice()); + self.crate_name)[]); self.sess.span_note(self.span, format!(r"candidate #1: {}", ret.as_ref().unwrap() - .display()).as_slice()); + .display())[]); error = 1; ret = None; } @@ -533,7 +533,7 @@ impl<'a> Context<'a> { error += 1; self.sess.span_note(self.span, format!(r"candidate #{}: {}", error, - lib.display()).as_slice()); + lib.display())[]); continue } *slot = Some(metadata); @@ -608,17 +608,17 @@ impl<'a> Context<'a> { let mut rlibs = HashSet::new(); let mut dylibs = HashSet::new(); { - let mut locs = locs.iter().map(|l| Path::new(l.as_slice())).filter(|loc| { + let mut locs = locs.iter().map(|l| Path::new(l[])).filter(|loc| { if !loc.exists() { sess.err(format!("extern location for {} does not exist: {}", - self.crate_name, loc.display()).as_slice()); + self.crate_name, loc.display())[]); return false; } let file = match loc.filename_str() { Some(file) => file, None => { sess.err(format!("extern location for {} is not a file: {}", - self.crate_name, loc.display()).as_slice()); + self.crate_name, loc.display())[]); return false; } }; @@ -626,12 +626,12 @@ impl<'a> Context<'a> { return true } else { let (ref prefix, ref suffix) = dylibname; - if file.starts_with(prefix.as_slice()) && file.ends_with(suffix.as_slice()) { + if file.starts_with(prefix[]) && file.ends_with(suffix[]) { return true } } sess.err(format!("extern location for {} is of an unknown type: {}", - self.crate_name, loc.display()).as_slice()); + self.crate_name, loc.display())[]); false }); @@ -664,7 +664,7 @@ impl<'a> Context<'a> { } pub fn note_crate_name(diag: &SpanHandler, name: &str) { - diag.handler().note(format!("crate name: {}", name).as_slice()); + diag.handler().note(format!("crate name: {}", name)[]); } impl ArchiveMetadata { diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs index 9d3a2c1d66777..61ea93a9fdd1b 100644 --- a/src/librustc/metadata/tydecode.rs +++ 
b/src/librustc/metadata/tydecode.rs @@ -233,7 +233,7 @@ fn parse_trait_store(st: &mut PState, conv: conv_did) -> ty::TraitStore { '&' => ty::RegionTraitStore(parse_region(st, conv), parse_mutability(st)), c => { st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", - c).as_slice()) + c)[]) } } } @@ -287,7 +287,7 @@ fn parse_bound_region(st: &mut PState, conv: conv_did) -> ty::BoundRegion { } '[' => { let def = parse_def(st, RegionParameter, |x,y| conv(x,y)); - let ident = token::str_to_ident(parse_str(st, ']').as_slice()); + let ident = token::str_to_ident(parse_str(st, ']')[]); ty::BrNamed(def, ident.name) } 'f' => { @@ -318,7 +318,7 @@ fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region { assert_eq!(next(st), '|'); let index = parse_uint(st); assert_eq!(next(st), '|'); - let nm = token::str_to_ident(parse_str(st, ']').as_slice()); + let nm = token::str_to_ident(parse_str(st, ']')[]); ty::ReEarlyBound(node_id, space, index, nm.name) } 'f' => { @@ -453,7 +453,11 @@ fn parse_ty<'a, 'tcx>(st: &mut PState<'a, 'tcx>, conv: conv_did) -> Ty<'tcx> { return ty::mk_closure(st.tcx, parse_closure_ty(st, |x,y| conv(x,y))); } 'F' => { - return ty::mk_bare_fn(st.tcx, parse_bare_fn_ty(st, |x,y| conv(x,y))); + let def_id = parse_def(st, NominalType, |x,y| conv(x,y)); + return ty::mk_bare_fn(st.tcx, Some(def_id), parse_bare_fn_ty(st, |x,y| conv(x,y))); + } + 'G' => { + return ty::mk_bare_fn(st.tcx, None, parse_bare_fn_ty(st, |x,y| conv(x,y))); } '#' => { let pos = parse_hex(st); @@ -560,7 +564,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi { assert_eq!(next(st), '['); scan(st, |c| c == ']', |bytes| { let abi_str = str::from_utf8(bytes).unwrap(); - abi::lookup(abi_str.as_slice()).expect(abi_str) + abi::lookup(abi_str[]).expect(abi_str) }) } @@ -639,12 +643,12 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId { let crate_part = buf[0u..colon_idx]; let def_part = buf[colon_idx + 1u..len]; - let crate_num = match str::from_utf8(crate_part).and_then(from_str::) { + let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::()) { Some(cn) => cn as ast::CrateNum, None => panic!("internal error: parse_def_id: crate number expected, found {}", crate_part) }; - let def_num = match str::from_utf8(def_part).and_then(from_str::) { + let def_num = match str::from_utf8(def_part).ok().and_then(|s| s.parse::()) { Some(dn) => dn as ast::NodeId, None => panic!("internal error: parse_def_id: id expected, found {}", def_part) diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 5d7d85d4679d7..7fa23620af4b6 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -55,7 +55,108 @@ pub fn enc_ty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, t: Ty<'t None => {} } let pos = w.tell().unwrap(); - enc_sty(w, cx, &t.sty); + + match t.sty { + ty::ty_bool => mywrite!(w, "b"), + ty::ty_char => mywrite!(w, "c"), + ty::ty_int(t) => { + match t { + ast::TyI => mywrite!(w, "i"), + ast::TyI8 => mywrite!(w, "MB"), + ast::TyI16 => mywrite!(w, "MW"), + ast::TyI32 => mywrite!(w, "ML"), + ast::TyI64 => mywrite!(w, "MD") + } + } + ty::ty_uint(t) => { + match t { + ast::TyU => mywrite!(w, "u"), + ast::TyU8 => mywrite!(w, "Mb"), + ast::TyU16 => mywrite!(w, "Mw"), + ast::TyU32 => mywrite!(w, "Ml"), + ast::TyU64 => mywrite!(w, "Md") + } + } + ty::ty_float(t) => { + match t { + ast::TyF32 => mywrite!(w, "Mf"), + ast::TyF64 => mywrite!(w, "MF"), + } + } + ty::ty_enum(def, ref substs) => { + mywrite!(w, "t[{}|", (cx.ds)(def)); + 
enc_substs(w, cx, substs); + mywrite!(w, "]"); + } + ty::ty_trait(box ty::TyTrait { ref principal, + ref bounds }) => { + mywrite!(w, "x["); + enc_trait_ref(w, cx, &principal.0); + enc_existential_bounds(w, cx, bounds); + mywrite!(w, "]"); + } + ty::ty_tup(ref ts) => { + mywrite!(w, "T["); + for t in ts.iter() { enc_ty(w, cx, *t); } + mywrite!(w, "]"); + } + ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); } + ty::ty_ptr(mt) => { mywrite!(w, "*"); enc_mt(w, cx, mt); } + ty::ty_rptr(r, mt) => { + mywrite!(w, "&"); + enc_region(w, cx, r); + enc_mt(w, cx, mt); + } + ty::ty_vec(t, sz) => { + mywrite!(w, "V"); + enc_ty(w, cx, t); + mywrite!(w, "/"); + match sz { + Some(n) => mywrite!(w, "{}|", n), + None => mywrite!(w, "|"), + } + } + ty::ty_str => { + mywrite!(w, "v"); + } + ty::ty_closure(ref f) => { + mywrite!(w, "f"); + enc_closure_ty(w, cx, &**f); + } + ty::ty_bare_fn(Some(def_id), ref f) => { + mywrite!(w, "F"); + mywrite!(w, "{}|", (cx.ds)(def_id)); + enc_bare_fn_ty(w, cx, f); + } + ty::ty_bare_fn(None, ref f) => { + mywrite!(w, "G"); + enc_bare_fn_ty(w, cx, f); + } + ty::ty_infer(_) => { + cx.diag.handler().bug("cannot encode inference variable types"); + } + ty::ty_param(ParamTy {space, idx: id, def_id: did}) => { + mywrite!(w, "p{}|{}|{}|", (cx.ds)(did), id, space.to_uint()) + } + ty::ty_struct(def, ref substs) => { + mywrite!(w, "a[{}|", (cx.ds)(def)); + enc_substs(w, cx, substs); + mywrite!(w, "]"); + } + ty::ty_unboxed_closure(def, region, ref substs) => { + mywrite!(w, "k[{}|", (cx.ds)(def)); + enc_region(w, cx, region); + enc_substs(w, cx, substs); + mywrite!(w, "]"); + } + ty::ty_err => { + mywrite!(w, "e"); + } + ty::ty_open(_) => { + cx.diag.handler().bug("unexpected type in enc_sty (ty_open)"); + } + } + let end = w.tell().unwrap(); let len = end - pos; fn estimate_sz(u: u64) -> u64 { @@ -214,105 +315,6 @@ pub fn enc_trait_store(w: &mut SeekableMemWriter, cx: &ctxt, s: ty::TraitStore) } } -fn enc_sty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>, - st: &ty::sty<'tcx>) { - match *st { - ty::ty_bool => mywrite!(w, "b"), - ty::ty_char => mywrite!(w, "c"), - ty::ty_int(t) => { - match t { - ast::TyI => mywrite!(w, "i"), - ast::TyI8 => mywrite!(w, "MB"), - ast::TyI16 => mywrite!(w, "MW"), - ast::TyI32 => mywrite!(w, "ML"), - ast::TyI64 => mywrite!(w, "MD") - } - } - ty::ty_uint(t) => { - match t { - ast::TyU => mywrite!(w, "u"), - ast::TyU8 => mywrite!(w, "Mb"), - ast::TyU16 => mywrite!(w, "Mw"), - ast::TyU32 => mywrite!(w, "Ml"), - ast::TyU64 => mywrite!(w, "Md") - } - } - ty::ty_float(t) => { - match t { - ast::TyF32 => mywrite!(w, "Mf"), - ast::TyF64 => mywrite!(w, "MF"), - } - } - ty::ty_enum(def, ref substs) => { - mywrite!(w, "t[{}|", (cx.ds)(def)); - enc_substs(w, cx, substs); - mywrite!(w, "]"); - } - ty::ty_trait(box ty::TyTrait { ref principal, - ref bounds }) => { - mywrite!(w, "x["); - enc_trait_ref(w, cx, &principal.0); - enc_existential_bounds(w, cx, bounds); - mywrite!(w, "]"); - } - ty::ty_tup(ref ts) => { - mywrite!(w, "T["); - for t in ts.iter() { enc_ty(w, cx, *t); } - mywrite!(w, "]"); - } - ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); } - ty::ty_ptr(mt) => { mywrite!(w, "*"); enc_mt(w, cx, mt); } - ty::ty_rptr(r, mt) => { - mywrite!(w, "&"); - enc_region(w, cx, r); - enc_mt(w, cx, mt); - } - ty::ty_vec(t, sz) => { - mywrite!(w, "V"); - enc_ty(w, cx, t); - mywrite!(w, "/"); - match sz { - Some(n) => mywrite!(w, "{}|", n), - None => mywrite!(w, "|"), - } - } - ty::ty_str => { - mywrite!(w, "v"); - } - ty::ty_closure(ref f) => 
{ - mywrite!(w, "f"); - enc_closure_ty(w, cx, &**f); - } - ty::ty_bare_fn(ref f) => { - mywrite!(w, "F"); - enc_bare_fn_ty(w, cx, f); - } - ty::ty_infer(_) => { - cx.diag.handler().bug("cannot encode inference variable types"); - } - ty::ty_param(ParamTy {space, idx: id, def_id: did}) => { - mywrite!(w, "p{}|{}|{}|", (cx.ds)(did), id, space.to_uint()) - } - ty::ty_struct(def, ref substs) => { - mywrite!(w, "a[{}|", (cx.ds)(def)); - enc_substs(w, cx, substs); - mywrite!(w, "]"); - } - ty::ty_unboxed_closure(def, region, ref substs) => { - mywrite!(w, "k[{}|", (cx.ds)(def)); - enc_region(w, cx, region); - enc_substs(w, cx, substs); - mywrite!(w, "]"); - } - ty::ty_err => { - mywrite!(w, "e"); - } - ty::ty_open(_) => { - cx.diag.handler().bug("unexpected type in enc_sty (ty_open)"); - } - } -} - fn enc_unsafety(w: &mut SeekableMemWriter, p: ast::Unsafety) { match p { ast::Unsafety::Normal => mywrite!(w, "n"), diff --git a/src/librustc/middle/astconv_util.rs b/src/librustc/middle/astconv_util.rs index 6b90bcd60e753..060e2f67faf98 100644 --- a/src/librustc/middle/astconv_util.rs +++ b/src/librustc/middle/astconv_util.rs @@ -48,7 +48,7 @@ pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty) None => { tcx.sess.span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx)).as_slice()) + path.repr(tcx))[]) } Some(&d) => d }; diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 69fbd59fd9241..e8627dfa64b0b 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -132,7 +132,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata, // Do an Option dance to use the path after it is moved below. let s = ast_map::path_to_string(ast_map::Values(path.iter())); path_as_str = Some(s); - path_as_str.as_ref().map(|x| x.as_slice()) + path_as_str.as_ref().map(|x| x[]) }); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); @@ -1007,14 +1007,21 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { self.emit_enum("AutoAdjustment", |this| { match *adj { - ty::AdjustAddEnv(store) => { - this.emit_enum_variant("AutoAddEnv", 0, 1, |this| { - this.emit_enum_variant_arg(0, |this| store.encode(this)) + ty::AdjustAddEnv(def_id, store) => { + this.emit_enum_variant("AdjustAddEnv", 0, 2, |this| { + this.emit_enum_variant_arg(0, |this| def_id.encode(this)); + this.emit_enum_variant_arg(1, |this| store.encode(this)) + }) + } + + ty::AdjustReifyFnPointer(def_id) => { + this.emit_enum_variant("AdjustReifyFnPointer", 1, 2, |this| { + this.emit_enum_variant_arg(0, |this| def_id.encode(this)) }) } ty::AdjustDerefRef(ref auto_deref_ref) => { - this.emit_enum_variant("AutoDerefRef", 1, 1, |this| { + this.emit_enum_variant("AdjustDerefRef", 2, 2, |this| { this.emit_enum_variant_arg(0, |this| Ok(this.emit_auto_deref_ref(ecx, auto_deref_ref))) }) @@ -1648,12 +1655,20 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> { this.read_enum_variant(&variants, |this, i| { Ok(match i { 0 => { + let def_id: ast::DefId = + this.read_def_id(dcx); let store: ty::TraitStore = this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap(); - ty::AdjustAddEnv(store.tr(dcx)) + ty::AdjustAddEnv(def_id, store.tr(dcx)) } 1 => { + let def_id: ast::DefId = + this.read_def_id(dcx); + + ty::AdjustReifyFnPointer(def_id) + } + 2 => { let auto_deref_ref: ty::AutoDerefRef = this.read_enum_variant_arg(0, |this| Ok(this.read_auto_deref_ref(dcx))).unwrap(); @@ -1876,7 
+1891,7 @@ fn decode_side_tables(dcx: &DecodeContext, None => { dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag).as_slice()); + tag)[]); } Some(value) => { let val_doc = entry_doc.get(c::tag_table_val as uint); @@ -1961,7 +1976,7 @@ fn decode_side_tables(dcx: &DecodeContext, _ => { dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag).as_slice()); + tag)[]); } } } diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index 82bed2540317f..2d50757782dbd 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -362,7 +362,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let mut cond_exit = discr_exit; for arm in arms.iter() { cond_exit = self.add_dummy_node(&[cond_exit]); // 2 - let pats_exit = self.pats_any(arm.pats.as_slice(), + let pats_exit = self.pats_any(arm.pats[], cond_exit); // 3 let guard_exit = self.opt_expr(&arm.guard, pats_exit); // 4 @@ -617,14 +617,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.tcx.sess.span_bug( expr.span, format!("no loop scope for id {}", - loop_id).as_slice()); + loop_id)[]); } r => { self.tcx.sess.span_bug( expr.span, format!("bad entry `{}` in def_map for label", - r).as_slice()); + r)[]); } } } diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs index e33f44967f1ad..13bd22a67c410 100644 --- a/src/librustc/middle/cfg/graphviz.rs +++ b/src/librustc/middle/cfg/graphviz.rs @@ -50,7 +50,7 @@ fn replace_newline_with_backslash_l(s: String) -> String { } impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { - fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.as_slice()).unwrap() } + fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name[]).unwrap() } fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { dot::Id::new(format!("N{}", i.node_id())).unwrap() @@ -83,8 +83,7 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { let s = self.ast_map.node_to_string(node_id); // left-aligns the lines let s = replace_newline_with_backslash_l(s); - label.push_str(format!("exiting scope_{} {}", i, - s.as_slice()).as_slice()); + label.push_str(format!("exiting scope_{} {}", i, s[])[]); } dot::EscStr(label.into_cow()) } diff --git a/src/librustc/middle/cfg/mod.rs b/src/librustc/middle/cfg/mod.rs index a74fff5630bfd..e1c5906f0fb83 100644 --- a/src/librustc/middle/cfg/mod.rs +++ b/src/librustc/middle/cfg/mod.rs @@ -48,4 +48,8 @@ impl CFG { blk: &ast::Block) -> CFG { construct::construct(tcx, blk) } + + pub fn node_is_reachable(&self, id: ast::NodeId) -> bool { + self.graph.depth_traverse(self.entry).any(|node| node.id == id) + } } diff --git a/src/librustc/middle/check_const.rs b/src/librustc/middle/check_const.rs index a91ea8bfef8c7..e08dd64d4d411 100644 --- a/src/librustc/middle/check_const.rs +++ b/src/librustc/middle/check_const.rs @@ -127,7 +127,11 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &ast::Expr) -> bool { ast::ExprCast(ref from, _) => { let toty = ty::expr_ty(v.tcx, e); let fromty = ty::expr_ty(v.tcx, &**from); - if !ty::type_is_numeric(toty) && !ty::type_is_unsafe_ptr(toty) { + let is_legal_cast = + ty::type_is_numeric(toty) || + ty::type_is_unsafe_ptr(toty) || + (ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty)); + if !is_legal_cast { span_err!(v.tcx.sess, e.span, E0012, "can not cast to `{}` in a constant expression", ppaux::ty_to_string(v.tcx, toty)); diff --git a/src/librustc/middle/check_loop.rs 
b/src/librustc/middle/check_loop.rs index cb454f94dc74c..5a08d7c179d1c 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -74,13 +74,11 @@ impl<'a> CheckLoopVisitor<'a> { Loop => {} Closure => { self.sess.span_err(span, - format!("`{}` inside of a closure", - name).as_slice()); + format!("`{}` inside of a closure", name)[]); } Normal => { self.sess.span_err(span, - format!("`{}` outside of loop", - name).as_slice()); + format!("`{}` outside of loop", name)[]); } } } diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 79e776c330884..da1bd09ceffdd 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -22,7 +22,7 @@ use middle::expr_use_visitor as euv; use middle::mem_categorization::cmt; use middle::pat_util::*; use middle::ty::*; -use middle::ty::{mod, Ty}; +use middle::ty; use std::fmt; use std::iter::AdditiveIterator; use std::iter::range_inclusive; @@ -162,7 +162,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { // First, check legality of move bindings. check_legality_of_move_bindings(cx, arm.guard.is_some(), - arm.pats.as_slice()); + arm.pats[]); // Second, if there is a guard on each arm, make sure it isn't // assigning or borrowing anything mutably. @@ -199,7 +199,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { } // Fourth, check for unreachable arms. - check_arms(cx, inlined_arms.as_slice(), source); + check_arms(cx, inlined_arms[], source); // Finally, check if the whole match expression is exhaustive. // Check for empty enum, because is_useful only works on inhabited types. @@ -231,7 +231,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { pat.span, format!("refutable pattern in `for` loop binding: \ `{}` not covered", - pat_to_string(uncovered_pat)).as_slice()); + pat_to_string(uncovered_pat))[]); }); // Check legality of move bindings. @@ -304,10 +304,10 @@ fn check_arms(cx: &MatchCheckCtxt, for pat in pats.iter() { let v = vec![&**pat]; - match is_useful(cx, &seen, v.as_slice(), LeaveOutWitness) { + match is_useful(cx, &seen, v[], LeaveOutWitness) { NotUseful => { match source { - ast::MatchIfLetDesugar => { + ast::MatchSource::IfLetDesugar { .. } => { if printed_if_let_err { // we already printed an irrefutable if-let pattern error. // We don't want two, that's just confusing. 
@@ -321,7 +321,7 @@ fn check_arms(cx: &MatchCheckCtxt, } }, - ast::MatchWhileLetDesugar => { + ast::MatchSource::WhileLetDesugar => { // find the first arm pattern so we can use its span let &(ref first_arm_pats, _) = &arms[0]; let first_pat = &first_arm_pats[0]; @@ -329,7 +329,7 @@ fn check_arms(cx: &MatchCheckCtxt, span_err!(cx.tcx.sess, span, E0165, "irrefutable while-let pattern"); }, - ast::MatchNormal => { + ast::MatchSource::Normal => { span_err!(cx.tcx.sess, pat.span, E0001, "unreachable pattern") }, } @@ -356,7 +356,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat { fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) { match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) { UsefulWithWitness(pats) => { - let witness = match pats.as_slice() { + let witness = match pats[] { [ref witness] => &**witness, [] => DUMMY_WILD_PAT, _ => unreachable!() @@ -610,7 +610,7 @@ fn is_useful(cx: &MatchCheckCtxt, UsefulWithWitness(pats) => UsefulWithWitness({ let arity = constructor_arity(cx, &c, left_ty); let mut result = { - let pat_slice = pats.as_slice(); + let pat_slice = pats[]; let subpats = Vec::from_fn(arity, |i| { pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p) }); @@ -657,10 +657,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, witness: WitnessPreference) -> Usefulness { let arity = constructor_arity(cx, &ctor, lty); let matrix = Matrix(m.iter().filter_map(|r| { - specialize(cx, r.as_slice(), &ctor, 0u, arity) + specialize(cx, r[], &ctor, 0u, arity) }).collect()); match specialize(cx, v, &ctor, 0u, arity) { - Some(v) => is_useful(cx, &matrix, v.as_slice(), witness), + Some(v) => is_useful(cx, &matrix, v[], witness), None => NotUseful } } @@ -1047,7 +1047,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, format!("binding pattern {} is not an \ identifier: {}", p.id, - p.node).as_slice()); + p.node)[]); } } } diff --git a/src/librustc/middle/check_static.rs b/src/librustc/middle/check_static.rs index 21e94d69366d6..6ff34d625005f 100644 --- a/src/librustc/middle/check_static.rs +++ b/src/librustc/middle/check_static.rs @@ -112,7 +112,7 @@ impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> { }; self.tcx.sess.span_err(e.span, format!("mutable statics are not allowed \ - to have {}", suffix).as_slice()); + to have {}", suffix)[]); } fn check_static_type(&self, e: &ast::Expr) { @@ -168,7 +168,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> { ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => { self.tcx.sess.span_err(e.span, format!("{} are not allowed to have \ - destructors", self.msg()).as_slice()) + destructors", self.msg())[]) } _ => {} } @@ -232,7 +232,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> { let msg = "constants cannot refer to other statics, \ insert an intermediate constant \ instead"; - self.tcx.sess.span_err(e.span, msg.as_slice()); + self.tcx.sess.span_err(e.span, msg[]); } _ => {} } diff --git a/src/librustc/middle/check_static_recursion.rs b/src/librustc/middle/check_static_recursion.rs index b32eb64025f88..c36b4aa7f231e 100644 --- a/src/librustc/middle/check_static_recursion.rs +++ b/src/librustc/middle/check_static_recursion.rs @@ -12,8 +12,7 @@ // recursively. 
use session::Session; -use middle::resolve; -use middle::def::{DefStatic, DefConst}; +use middle::def::{DefStatic, DefConst, DefMap}; use syntax::ast; use syntax::{ast_util, ast_map}; @@ -22,7 +21,7 @@ use syntax::visit; struct CheckCrateVisitor<'a, 'ast: 'a> { sess: &'a Session, - def_map: &'a resolve::DefMap, + def_map: &'a DefMap, ast_map: &'a ast_map::Map<'ast> } @@ -34,7 +33,7 @@ impl<'v, 'a, 'ast> Visitor<'v> for CheckCrateVisitor<'a, 'ast> { pub fn check_crate<'ast>(sess: &Session, krate: &ast::Crate, - def_map: &resolve::DefMap, + def_map: &DefMap, ast_map: &ast_map::Map<'ast>) { let mut visitor = CheckCrateVisitor { sess: sess, @@ -60,7 +59,7 @@ struct CheckItemRecursionVisitor<'a, 'ast: 'a> { root_it: &'a ast::Item, sess: &'a Session, ast_map: &'a ast_map::Map<'ast>, - def_map: &'a resolve::DefMap, + def_map: &'a DefMap, idstack: Vec } @@ -68,7 +67,7 @@ struct CheckItemRecursionVisitor<'a, 'ast: 'a> { // FIXME: Should use the dependency graph when it's available (#1356) pub fn check_item_recursion<'a>(sess: &'a Session, ast_map: &'a ast_map::Map, - def_map: &'a resolve::DefMap, + def_map: &'a DefMap, it: &'a ast::Item) { let mut visitor = CheckItemRecursionVisitor { @@ -106,7 +105,7 @@ impl<'a, 'ast, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a, 'ast> { _ => { self.sess.span_err(e.span, format!("expected item, found {}", - self.ast_map.node_to_string(def_id.node)).as_slice()); + self.ast_map.node_to_string(def_id.node))[]); return; }, } diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 9b94335654734..5b89912dd03fc 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -28,7 +28,7 @@ use syntax::visit::{mod, Visitor}; use syntax::{ast_map, ast_util, codemap}; use std::rc::Rc; -use std::collections::hash_map::Vacant; +use std::collections::hash_map::Entry::Vacant; // // This pass classifies expressions by their constant-ness. @@ -117,7 +117,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, None => None, Some(ast_map::NodeItem(it)) => match it.node { ast::ItemEnum(ast::EnumDef { ref variants }, _) => { - variant_expr(variants.as_slice(), variant_def.node) + variant_expr(variants[], variant_def.node) } _ => None }, @@ -138,7 +138,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, // NOTE this doesn't do the right thing, it compares inlined // NodeId's to the original variant_def's NodeId, but they // come from different crates, so they will likely never match. 
- variant_expr(variants.as_slice(), variant_def.node).map(|e| e.id) + variant_expr(variants[], variant_def.node).map(|e| e.id) } _ => None }, @@ -364,7 +364,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P { pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { match eval_const_expr_partial(tcx, e) { Ok(r) => r, - Err(s) => tcx.sess.span_fatal(e.span, s.as_slice()) + Err(s) => tcx.sess.span_fatal(e.span, s[]) } } @@ -603,7 +603,7 @@ pub fn lit_to_const(lit: &ast::Lit) -> const_val { ast::LitInt(n, ast::UnsignedIntLit(_)) => const_uint(n), ast::LitFloat(ref n, _) | ast::LitFloatUnsuffixed(ref n) => { - const_float(from_str::(n.get()).unwrap() as f64) + const_float(n.get().parse::().unwrap() as f64) } ast::LitBool(b) => const_bool(b) } diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index 17ebd1b94a708..a2d417ca345d8 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -311,7 +311,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let mut t = on_entry.to_vec(); self.apply_gen_kill(cfgidx, t.as_mut_slice()); temp_bits = t; - temp_bits.as_slice() + temp_bits[] } }; debug!("{} each_bit_for_node({}, cfgidx={}) bits={}", @@ -420,7 +420,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let bits = self.kills.slice_mut(start, end); debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [before]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); - bits.clone_from_slice(orig_kills.as_slice()); + bits.clone_from_slice(orig_kills[]); debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [after]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); } @@ -553,7 +553,7 @@ fn bits_to_string(words: &[uint]) -> String { let mut v = word; for _ in range(0u, uint::BYTES) { result.push(sep); - result.push_str(format!("{:02x}", v & 0xFF).as_slice()); + result.push_str(format!("{:02x}", v & 0xFF)[]); v >>= 8; sep = '-'; } diff --git a/src/librustc/middle/def.rs b/src/librustc/middle/def.rs index 20a0dbdc1eefa..a54bc4a945ae2 100644 --- a/src/librustc/middle/def.rs +++ b/src/librustc/middle/def.rs @@ -10,12 +10,17 @@ pub use self::Def::*; pub use self::MethodProvenance::*; +pub use self::TraitItemKind::*; use middle::subst::ParamSpace; +use middle::ty::{ExplicitSelfCategory, StaticExplicitSelfCategory}; +use util::nodemap::NodeMap; use syntax::ast; use syntax::ast_util::local_def; -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +use std::cell::RefCell; + +#[deriving(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Def { DefFn(ast::DefId, bool /* is_ctor */), DefStaticMethod(/* method */ ast::DefId, MethodProvenance), @@ -56,13 +61,25 @@ pub enum Def { DefMethod(ast::DefId /* method */, Option /* trait */, MethodProvenance), } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +// Definition mapping +pub type DefMap = RefCell>; +// This is the replacement export map. It maps a module to all of the exports +// within. +pub type ExportMap = NodeMap>; + +#[deriving(Copy)] +pub struct Export { + pub name: ast::Name, // The name of the target. + pub def_id: ast::DefId, // The definition of the target. 
+} + +#[deriving(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum MethodProvenance { FromTrait(ast::DefId), FromImpl(ast::DefId), } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum TyParamProvenance { FromSelf(ast::DefId), FromParam(ast::DefId), @@ -88,6 +105,25 @@ impl TyParamProvenance { } } +#[deriving(Clone, Copy, Eq, PartialEq)] +pub enum TraitItemKind { + NonstaticMethodTraitItemKind, + StaticMethodTraitItemKind, + TypeTraitItemKind, +} + +impl TraitItemKind { + pub fn from_explicit_self_category(explicit_self_category: + ExplicitSelfCategory) + -> TraitItemKind { + if explicit_self_category == StaticExplicitSelfCategory { + StaticMethodTraitItemKind + } else { + NonstaticMethodTraitItemKind + } + } +} + impl Def { pub fn def_id(&self) -> ast::DefId { match *self { @@ -122,4 +158,3 @@ impl Def { } } } - diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 3cb628c2e65c5..6b56ece28bdb2 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session, let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.rlib.is_some() { return } sess.err(format!("dependency `{}` not found in rlib format", - data.name).as_slice()); + data.name)[]); }); return Vec::new(); } @@ -197,7 +197,7 @@ fn calculate_type(sess: &session::Session, match kind { cstore::RequireStatic => "rlib", cstore::RequireDynamic => "dylib", - }).as_slice()); + })[]); } } } @@ -222,7 +222,7 @@ fn add_library(sess: &session::Session, let data = sess.cstore.get_crate_data(cnum); sess.err(format!("cannot satisfy dependencies so `{}` only \ shows up once", - data.name).as_slice()); + data.name)[]); sess.help("having upstream crates all available in one format \ will likely make this go away"); } diff --git a/src/librustc/middle/effect.rs b/src/librustc/middle/effect.rs index 0c0cba6e53e06..52899aaba412f 100644 --- a/src/librustc/middle/effect.rs +++ b/src/librustc/middle/effect.rs @@ -32,7 +32,7 @@ enum UnsafeContext { fn type_is_unsafe_function(ty: Ty) -> bool { match ty.sty { - ty::ty_bare_fn(ref f) => f.unsafety == ast::Unsafety::Unsafe, + ty::ty_bare_fn(_, ref f) => f.unsafety == ast::Unsafety::Unsafe, ty::ty_closure(ref f) => f.unsafety == ast::Unsafety::Unsafe, _ => false, } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index abc3c8d0d8fa4..7e31ae04ae083 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -678,7 +678,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { self.tcx().sess.span_bug( callee.span, format!("unexpected callee type {}", - callee_ty.repr(self.tcx())).as_slice()) + callee_ty.repr(self.tcx()))[]) } }; match overloaded_call_type { @@ -824,10 +824,12 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { None => { } Some(adjustment) => { match *adjustment { - ty::AdjustAddEnv(..) => { - // Creating a closure consumes the input and stores it - // into the resulting rvalue. - debug!("walk_adjustment(AutoAddEnv)"); + ty::AdjustAddEnv(..) | + ty::AdjustReifyFnPointer(..) => { + // Creating a closure/fn-pointer consumes the + // input and stores it into the resulting + // rvalue. 
+ debug!("walk_adjustment(AutoAddEnv|AdjustReifyFnPointer)"); let cmt_unadjusted = return_if_err!(self.mc.cat_expr_unadjusted(expr)); self.delegate_consume(expr.id, expr.span, cmt_unadjusted); @@ -869,7 +871,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => self.tcx().sess.span_bug(expr.span, format!("bad overloaded deref type {}", - method_ty.repr(self.tcx())).as_slice()) + method_ty.repr(self.tcx()))[]) }; let bk = ty::BorrowKind::from_mutbl(m); self.delegate.borrow(expr.id, expr.span, cmt, @@ -1186,7 +1188,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { // pattern. let msg = format!("Pattern has unexpected type: {}", def); - tcx.sess.span_bug(pat.span, msg.as_slice()) + tcx.sess.span_bug(pat.span, msg[]) } Some(def) => { @@ -1195,7 +1197,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { // should not resolve. let msg = format!("Pattern has unexpected def: {}", def); - tcx.sess.span_bug(pat.span, msg.as_slice()) + tcx.sess.span_bug(pat.span, msg[]) } } } diff --git a/src/librustc/middle/fast_reject.rs b/src/librustc/middle/fast_reject.rs index 297d6bcb03cb9..62cf47da68701 100644 --- a/src/librustc/middle/fast_reject.rs +++ b/src/librustc/middle/fast_reject.rs @@ -83,7 +83,7 @@ pub fn simplify_type(tcx: &ty::ctxt, ty::ty_closure(ref f) => { Some(FunctionSimplifiedType(f.sig.0.inputs.len())) } - ty::ty_bare_fn(ref f) => { + ty::ty_bare_fn(_, ref f) => { Some(FunctionSimplifiedType(f.sig.0.inputs.len())) } ty::ty_param(_) => { diff --git a/src/librustc/middle/graph.rs b/src/librustc/middle/graph.rs index e73fcd93e0504..06e6ef30f74da 100644 --- a/src/librustc/middle/graph.rs +++ b/src/librustc/middle/graph.rs @@ -34,6 +34,7 @@ use std::fmt::{Formatter, Error, Show}; use std::uint; +use std::collections::BitvSet; pub struct Graph { nodes: Vec> , @@ -288,6 +289,40 @@ impl Graph { } } } + + pub fn depth_traverse<'a>(&'a self, start: NodeIndex) -> DepthFirstTraversal<'a, N, E> { + DepthFirstTraversal { + graph: self, + stack: vec![start], + visited: BitvSet::new() + } + } +} + +pub struct DepthFirstTraversal<'g, N:'g, E:'g> { + graph: &'g Graph, + stack: Vec, + visited: BitvSet +} + +impl<'g, N, E> Iterator<&'g N> for DepthFirstTraversal<'g, N, E> { + fn next(&mut self) -> Option<&'g N> { + while let Some(idx) = self.stack.pop() { + if !self.visited.insert(idx.node_id()) { + continue; + } + self.graph.each_outgoing_edge(idx, |_, e| -> bool { + if !self.visited.contains(&e.target().node_id()) { + self.stack.push(e.target()); + } + true + }); + + return Some(self.graph.node_data(idx)); + } + + return None; + } } pub fn each_edge_index(max_edge_index: EdgeIndex, mut f: F) where diff --git a/src/librustc/middle/infer/coercion.rs b/src/librustc/middle/infer/coercion.rs index 805d4532aa1c4..ec83b8fae9b7c 100644 --- a/src/librustc/middle/infer/coercion.rs +++ b/src/librustc/middle/infer/coercion.rs @@ -84,14 +84,18 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let Coerce(ref v) = *self; v } + fn tcx(&self) -> &ty::ctxt<'tcx> { + self.get_ref().infcx.tcx + } + pub fn tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { debug!("Coerce.tys({} => {})", - a.repr(self.get_ref().infcx.tcx), - b.repr(self.get_ref().infcx.tcx)); + a.repr(self.tcx()), + b.repr(self.tcx())); // Consider coercing the subtype to a DST - let unsize = self.unpack_actual_value(a, |sty_a| { - self.coerce_unsized(a, sty_a, b) + let unsize = self.unpack_actual_value(a, |a| { + self.coerce_unsized(a, b) }); 
if unsize.is_ok() { return unsize; @@ -105,14 +109,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { ty::ty_ptr(mt_b) => { match mt_b.ty.sty { ty::ty_str => { - return self.unpack_actual_value(a, |sty_a| { - self.coerce_unsafe_ptr(a, sty_a, b, ast::MutImmutable) + return self.unpack_actual_value(a, |a| { + self.coerce_unsafe_ptr(a, b, ast::MutImmutable) }); } ty::ty_trait(..) => { - let result = self.unpack_actual_value(a, |sty_a| { - self.coerce_unsafe_object(a, sty_a, b, mt_b.mutbl) + let result = self.unpack_actual_value(a, |a| { + self.coerce_unsafe_object(a, b, mt_b.mutbl) }); match result { @@ -122,8 +126,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } _ => { - return self.unpack_actual_value(a, |sty_a| { - self.coerce_unsafe_ptr(a, sty_a, b, mt_b.mutbl) + return self.unpack_actual_value(a, |a| { + self.coerce_unsafe_ptr(a, b, mt_b.mutbl) }); } }; @@ -132,14 +136,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { ty::ty_rptr(_, mt_b) => { match mt_b.ty.sty { ty::ty_str => { - return self.unpack_actual_value(a, |sty_a| { - self.coerce_borrowed_pointer(a, sty_a, b, ast::MutImmutable) + return self.unpack_actual_value(a, |a| { + self.coerce_borrowed_pointer(a, b, ast::MutImmutable) }); } ty::ty_trait(..) => { - let result = self.unpack_actual_value(a, |sty_a| { - self.coerce_borrowed_object(a, sty_a, b, mt_b.mutbl) + let result = self.unpack_actual_value(a, |a| { + self.coerce_borrowed_object(a, b, mt_b.mutbl) }); match result { @@ -149,8 +153,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } _ => { - return self.unpack_actual_value(a, |sty_a| { - self.coerce_borrowed_pointer(a, sty_a, b, mt_b.mutbl) + return self.unpack_actual_value(a, |a| { + self.coerce_borrowed_pointer(a, b, mt_b.mutbl) }); } }; @@ -160,23 +164,21 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { store: ty::RegionTraitStore(..), .. }) => { - return self.unpack_actual_value(a, |sty_a| { - self.coerce_borrowed_fn(a, sty_a, b) + return self.unpack_actual_value(a, |a| { + self.coerce_borrowed_fn(a, b) }); } _ => {} } - self.unpack_actual_value(a, |sty_a| { - match *sty_a { - ty::ty_bare_fn(ref a_f) => { - // Bare functions are coercible to any closure type. - // - // FIXME(#3320) this should go away and be - // replaced with proper inference, got a patch - // underway - ndm - self.coerce_from_bare_fn(a, a_f, b) + self.unpack_actual_value(a, |a| { + match a.sty { + ty::ty_bare_fn(Some(a_def_id), ref a_f) => { + // Function items are coercible to any closure + // type; function pointers are not (that would + // require double indirection). + self.coerce_from_fn_item(a, a_def_id, a_f, b) } _ => { // Otherwise, just use subtyping rules. 
@@ -194,21 +196,20 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } pub fn unpack_actual_value(&self, a: Ty<'tcx>, f: F) -> T where - F: FnOnce(&ty::sty<'tcx>) -> T, + F: FnOnce(Ty<'tcx>) -> T, { - f(&self.get_ref().infcx.shallow_resolve(a).sty) + f(self.get_ref().infcx.shallow_resolve(a)) } // ~T -> &T or &mut T -> &T (including where T = [U] or str) pub fn coerce_borrowed_pointer(&self, a: Ty<'tcx>, - sty_a: &ty::sty<'tcx>, b: Ty<'tcx>, mutbl_b: ast::Mutability) -> CoerceResult<'tcx> { - debug!("coerce_borrowed_pointer(a={}, sty_a={}, b={})", - a.repr(self.get_ref().infcx.tcx), sty_a, - b.repr(self.get_ref().infcx.tcx)); + debug!("coerce_borrowed_pointer(a={}, b={})", + a.repr(self.tcx()), + b.repr(self.tcx())); // If we have a parameter of type `&M T_a` and the value // provided is `expr`, we will be adding an implicit borrow, @@ -220,7 +221,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let coercion = Coercion(self.get_ref().trace.clone()); let r_borrow = self.get_ref().infcx.next_region_var(coercion); - let inner_ty = match *sty_a { + let inner_ty = match a.sty { ty::ty_uniq(_) => return Err(ty::terr_mismatch), ty::ty_rptr(_, mt_a) => mt_a.ty, _ => { @@ -228,7 +229,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } }; - let a_borrowed = ty::mk_rptr(self.get_ref().infcx.tcx, + let a_borrowed = ty::mk_rptr(self.tcx(), r_borrow, mt {ty: inner_ty, mutbl: mutbl_b}); try!(sub.tys(a_borrowed, b)); @@ -245,12 +246,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // or &Concrete -> &Trait, etc. fn coerce_unsized(&self, a: Ty<'tcx>, - sty_a: &ty::sty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { - debug!("coerce_unsized(a={}, sty_a={}, b={})", - a.repr(self.get_ref().infcx.tcx), sty_a, - b.repr(self.get_ref().infcx.tcx)); + debug!("coerce_unsized(a={}, b={})", + a.repr(self.tcx()), + b.repr(self.tcx())); // Note, we want to avoid unnecessary unsizing. We don't want to coerce to // a DST unless we have to. 
This currently comes out in the wash since @@ -259,11 +259,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let sub = Sub(self.get_ref().clone()); - let sty_b = &b.sty; - match (sty_a, sty_b) { + match (&a.sty, &b.sty) { (&ty::ty_rptr(_, ty::mt{ty: t_a, mutbl: mutbl_a}), &ty::ty_rptr(_, mt_b)) => { - self.unpack_actual_value(t_a, |sty_a| { - match self.unsize_ty(t_a, sty_a, mt_b.ty) { + self.unpack_actual_value(t_a, |a| { + match self.unsize_ty(t_a, a, mt_b.ty) { Some((ty, kind)) => { if !can_coerce_mutbls(mutbl_a, mt_b.mutbl) { return Err(ty::terr_mutability); @@ -271,7 +270,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let coercion = Coercion(self.get_ref().trace.clone()); let r_borrow = self.get_ref().infcx.next_region_var(coercion); - let ty = ty::mk_rptr(self.get_ref().infcx.tcx, + let ty = ty::mk_rptr(self.tcx(), r_borrow, ty::mt{ty: ty, mutbl: mt_b.mutbl}); try!(self.get_ref().infcx.try(|_| sub.tys(ty, b))); @@ -288,14 +287,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }) } (&ty::ty_rptr(_, ty::mt{ty: t_a, mutbl: mutbl_a}), &ty::ty_ptr(mt_b)) => { - self.unpack_actual_value(t_a, |sty_a| { - match self.unsize_ty(t_a, sty_a, mt_b.ty) { + self.unpack_actual_value(t_a, |a| { + match self.unsize_ty(t_a, a, mt_b.ty) { Some((ty, kind)) => { if !can_coerce_mutbls(mutbl_a, mt_b.mutbl) { return Err(ty::terr_mutability); } - let ty = ty::mk_ptr(self.get_ref().infcx.tcx, + let ty = ty::mk_ptr(self.tcx(), ty::mt{ty: ty, mutbl: mt_b.mutbl}); try!(self.get_ref().infcx.try(|_| sub.tys(ty, b))); debug!("Success, coerced with AutoDerefRef(1, \ @@ -311,10 +310,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }) } (&ty::ty_uniq(t_a), &ty::ty_uniq(t_b)) => { - self.unpack_actual_value(t_a, |sty_a| { - match self.unsize_ty(t_a, sty_a, t_b) { + self.unpack_actual_value(t_a, |a| { + match self.unsize_ty(t_a, a, t_b) { Some((ty, kind)) => { - let ty = ty::mk_uniq(self.get_ref().infcx.tcx, ty); + let ty = ty::mk_uniq(self.tcx(), ty); try!(self.get_ref().infcx.try(|_| sub.tys(ty, b))); debug!("Success, coerced with AutoDerefRef(1, \ AutoUnsizeUniq({}))", kind); @@ -336,15 +335,15 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // E.g., `[T, ..n]` -> `([T], UnsizeLength(n))` fn unsize_ty(&self, ty_a: Ty<'tcx>, - sty_a: &ty::sty<'tcx>, + a: Ty<'tcx>, ty_b: Ty<'tcx>) -> Option<(Ty<'tcx>, ty::UnsizeKind<'tcx>)> { - debug!("unsize_ty(sty_a={}, ty_b={})", sty_a, ty_b.repr(self.get_ref().infcx.tcx)); + debug!("unsize_ty(a={}, ty_b={})", a, ty_b.repr(self.tcx())); - let tcx = self.get_ref().infcx.tcx; + let tcx = self.tcx(); - self.unpack_actual_value(ty_b, |sty_b| - match (sty_a, sty_b) { + self.unpack_actual_value(ty_b, |b| + match (&a.sty, &b.sty) { (&ty::ty_vec(t_a, Some(len)), &ty::ty_vec(_, None)) => { let ty = ty::mk_vec(tcx, t_a, None); Some((ty, ty::UnsizeLength(len))) @@ -412,44 +411,41 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { fn coerce_borrowed_object(&self, a: Ty<'tcx>, - sty_a: &ty::sty<'tcx>, b: Ty<'tcx>, b_mutbl: ast::Mutability) -> CoerceResult<'tcx> { - let tcx = self.get_ref().infcx.tcx; + let tcx = self.tcx(); - debug!("coerce_borrowed_object(a={}, sty_a={}, b={}, b_mutbl={})", - a.repr(tcx), sty_a, + debug!("coerce_borrowed_object(a={}, b={}, b_mutbl={})", + a.repr(tcx), b.repr(tcx), b_mutbl); let coercion = Coercion(self.get_ref().trace.clone()); let r_a = self.get_ref().infcx.next_region_var(coercion); - self.coerce_object(a, sty_a, b, b_mutbl, + self.coerce_object(a, b, b_mutbl, |tr| ty::mk_rptr(tcx, r_a, ty::mt{ mutbl: b_mutbl, ty: tr }), || AutoPtr(r_a, b_mutbl, None)) } fn coerce_unsafe_object(&self, a: Ty<'tcx>, - sty_a: 
&ty::sty<'tcx>, b: Ty<'tcx>, b_mutbl: ast::Mutability) -> CoerceResult<'tcx> { - let tcx = self.get_ref().infcx.tcx; + let tcx = self.tcx(); - debug!("coerce_unsafe_object(a={}, sty_a={}, b={}, b_mutbl={})", - a.repr(tcx), sty_a, + debug!("coerce_unsafe_object(a={}, b={}, b_mutbl={})", + a.repr(tcx), b.repr(tcx), b_mutbl); - self.coerce_object(a, sty_a, b, b_mutbl, + self.coerce_object(a, b, b_mutbl, |tr| ty::mk_ptr(tcx, ty::mt{ mutbl: b_mutbl, ty: tr }), || AutoUnsafe(b_mutbl, None)) } fn coerce_object(&self, a: Ty<'tcx>, - sty_a: &ty::sty<'tcx>, b: Ty<'tcx>, b_mutbl: ast::Mutability, mk_ty: F, @@ -457,9 +453,9 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { F: FnOnce(Ty<'tcx>) -> Ty<'tcx>, G: FnOnce() -> ty::AutoRef<'tcx>, { - let tcx = self.get_ref().infcx.tcx; + let tcx = self.tcx(); - match *sty_a { + match a.sty { ty::ty_rptr(_, ty::mt{ty, mutbl}) => match ty.sty { ty::ty_trait(box ty::TyTrait { ref principal, bounds }) => { debug!("mutbl={} b_mutbl={}", mutbl, b_mutbl); @@ -483,16 +479,15 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { pub fn coerce_borrowed_fn(&self, a: Ty<'tcx>, - sty_a: &ty::sty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { - debug!("coerce_borrowed_fn(a={}, sty_a={}, b={})", - a.repr(self.get_ref().infcx.tcx), sty_a, - b.repr(self.get_ref().infcx.tcx)); + debug!("coerce_borrowed_fn(a={}, b={})", + a.repr(self.tcx()), + b.repr(self.tcx())); - match *sty_a { - ty::ty_bare_fn(ref f) => { - self.coerce_from_bare_fn(a, f, b) + match a.sty { + ty::ty_bare_fn(Some(a_def_id), ref f) => { + self.coerce_from_fn_item(a, a_def_id, f, b) } _ => { self.subtype(a, b) @@ -500,46 +495,59 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } } - /// Attempts to coerce from a bare Rust function (`extern "Rust" fn`) into a closure or a - /// `proc`. - fn coerce_from_bare_fn(&self, a: Ty<'tcx>, fn_ty_a: &ty::BareFnTy<'tcx>, b: Ty<'tcx>) + fn coerce_from_fn_item(&self, + a: Ty<'tcx>, + fn_def_id_a: ast::DefId, + fn_ty_a: &ty::BareFnTy<'tcx>, + b: Ty<'tcx>) -> CoerceResult<'tcx> { - self.unpack_actual_value(b, |sty_b| { - - debug!("coerce_from_bare_fn(a={}, b={})", - a.repr(self.get_ref().infcx.tcx), b.repr(self.get_ref().infcx.tcx)); + /*! + * Attempts to coerce from the type of a Rust function item + * into a closure or a `proc`. + */ + + self.unpack_actual_value(b, |b| { + debug!("coerce_from_fn_item(a={}, b={})", + a.repr(self.tcx()), b.repr(self.tcx())); + + match b.sty { + ty::ty_closure(ref f) => { + if fn_ty_a.abi != abi::Rust || fn_ty_a.unsafety != ast::Unsafety::Normal { + return self.subtype(a, b); + } - if fn_ty_a.abi != abi::Rust || fn_ty_a.unsafety != ast::Unsafety::Normal { - return self.subtype(a, b); + let fn_ty_b = (*f).clone(); + let adj = ty::AdjustAddEnv(fn_def_id_a, fn_ty_b.store); + let a_closure = ty::mk_closure(self.tcx(), + ty::ClosureTy { + sig: fn_ty_a.sig.clone(), + .. *fn_ty_b + }); + try!(self.subtype(a_closure, b)); + Ok(Some(adj)) + } + ty::ty_bare_fn(None, _) => { + let a_fn_pointer = ty::mk_bare_fn(self.tcx(), None, (*fn_ty_a).clone()); + try!(self.subtype(a_fn_pointer, b)); + Ok(Some(ty::AdjustReifyFnPointer(fn_def_id_a))) + } + _ => { + return self.subtype(a, b) + } } - - let fn_ty_b = match *sty_b { - ty::ty_closure(ref f) => (*f).clone(), - _ => return self.subtype(a, b) - }; - - let adj = ty::AdjustAddEnv(fn_ty_b.store); - let a_closure = ty::mk_closure(self.get_ref().infcx.tcx, - ty::ClosureTy { - sig: fn_ty_a.sig.clone(), - .. 
*fn_ty_b - }); - try!(self.subtype(a_closure, b)); - Ok(Some(adj)) }) } pub fn coerce_unsafe_ptr(&self, a: Ty<'tcx>, - sty_a: &ty::sty<'tcx>, b: Ty<'tcx>, mutbl_b: ast::Mutability) -> CoerceResult<'tcx> { - debug!("coerce_unsafe_ptr(a={}, sty_a={}, b={})", - a.repr(self.get_ref().infcx.tcx), sty_a, - b.repr(self.get_ref().infcx.tcx)); + debug!("coerce_unsafe_ptr(a={}, b={})", + a.repr(self.tcx()), + b.repr(self.tcx())); - let mt_a = match *sty_a { + let mt_a = match a.sty { ty::ty_rptr(_, mt) | ty::ty_ptr(mt) => mt, _ => { return self.subtype(a, b); @@ -547,7 +555,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }; // Check that the types which they point at are compatible. - let a_unsafe = ty::mk_ptr(self.get_ref().infcx.tcx, ty::mt{ mutbl: mutbl_b, ty: mt_a.ty }); + let a_unsafe = ty::mk_ptr(self.tcx(), ty::mt{ mutbl: mutbl_b, ty: mt_a.ty }); try!(self.subtype(a_unsafe, b)); if !can_coerce_mutbls(mt_a.mutbl, mutbl_b) { return Err(ty::terr_mutability); diff --git a/src/librustc/middle/infer/combine.rs b/src/librustc/middle/infer/combine.rs index 82ddbcee5a72e..14687b8fd75c6 100644 --- a/src/librustc/middle/infer/combine.rs +++ b/src/librustc/middle/infer/combine.rs @@ -141,7 +141,7 @@ pub trait Combine<'tcx> { for _ in a_regions.iter() { invariance.push(ty::Invariant); } - invariance.as_slice() + invariance[] } }; @@ -411,7 +411,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, format!("{}: bot and var types should have been handled ({},{})", this.tag(), a.repr(this.infcx().tcx), - b.repr(this.infcx().tcx)).as_slice()); + b.repr(this.infcx().tcx))[]); } (&ty::ty_err, _) | (_, &ty::ty_err) => { @@ -568,11 +568,12 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, } } - (&ty::ty_bare_fn(ref a_fty), &ty::ty_bare_fn(ref b_fty)) => { - this.bare_fn_tys(a_fty, b_fty).and_then(|fty| { - Ok(ty::mk_bare_fn(tcx, fty)) - }) - } + (&ty::ty_bare_fn(a_opt_def_id, ref a_fty), &ty::ty_bare_fn(b_opt_def_id, ref b_fty)) + if a_opt_def_id == b_opt_def_id => + { + let fty = try!(this.bare_fn_tys(a_fty, b_fty)); + Ok(ty::mk_bare_fn(tcx, a_opt_def_id, fty)) + } (&ty::ty_closure(ref a_fty), &ty::ty_closure(ref b_fty)) => { this.closure_tys(&**a_fty, &**b_fty).and_then(|fty| { diff --git a/src/librustc/middle/infer/error_reporting.rs b/src/librustc/middle/infer/error_reporting.rs index b4c1c0b396b64..0ea3d415ec5ce 100644 --- a/src/librustc/middle/infer/error_reporting.rs +++ b/src/librustc/middle/infer/error_reporting.rs @@ -199,9 +199,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ref trace_origins, ref same_regions) => { if !same_regions.is_empty() { - self.report_processed_errors(var_origins.as_slice(), - trace_origins.as_slice(), - same_regions.as_slice()); + self.report_processed_errors(var_origins[], + trace_origins[], + same_regions[]); } } } @@ -374,7 +374,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { format!("{}: {} ({})", message_root_str, expected_found_str, - ty::type_err_to_str(self.tcx, terr)).as_slice()); + ty::type_err_to_str(self.tcx, terr))[]); match trace.origin { infer::MatchExpressionArm(_, arm_span) => @@ -438,13 +438,13 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { origin.span(), format!( "the parameter type `{}` may not live long enough", - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx))[]); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound `{}: {}`...", param_ty.user_string(self.tcx), - sub.user_string(self.tcx)).as_slice()); + 
sub.user_string(self.tcx))[]); } ty::ReStatic => { @@ -453,12 +453,12 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { origin.span(), format!( "the parameter type `{}` may not live long enough", - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx))[]); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound `{}: 'static`...", - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx))[]); } _ => { @@ -467,16 +467,16 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { origin.span(), format!( "the parameter type `{}` may not live long enough", - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx))[]); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound to `{}`", - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx))[]); note_and_explain_region( self.tcx, format!("the parameter type `{}` must be valid for ", - param_ty.user_string(self.tcx)).as_slice(), + param_ty.user_string(self.tcx))[], sub, "..."); } @@ -518,7 +518,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).as_slice()); + .to_string())[]); note_and_explain_region( self.tcx, "...the borrowed pointer is valid for ", @@ -530,7 +530,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).as_slice(), + .to_string())[], sup, ""); } @@ -576,7 +576,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { outlive the enclosing closure", ty::local_var_name_str(self.tcx, id).get() - .to_string()).as_slice()); + .to_string())[]); note_and_explain_region( self.tcx, "captured variable is valid for ", @@ -618,7 +618,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("the type `{}` does not fulfill the \ required lifetime", - self.ty_to_string(ty)).as_slice()); + self.ty_to_string(ty))[]); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -644,7 +644,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("the type `{}` (provided as the value of \ a type parameter) is not valid at this point", - self.ty_to_string(ty)).as_slice()); + self.ty_to_string(ty))[]); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -710,7 +710,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("type of expression contains references \ that are not valid during the expression: `{}`", - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t))[]); note_and_explain_region( self.tcx, "type is only valid for ", @@ -732,7 +732,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("in type `{}`, reference has a longer lifetime \ than the data it references", - self.ty_to_string(ty)).as_slice()); + self.ty_to_string(ty))[]); note_and_explain_region( self.tcx, "the pointer is valid for ", @@ -857,7 +857,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { let (fn_decl, generics, unsafety, ident, expl_self, span) = node_inner.expect("expect item fn"); let taken = lifetimes_in_scope(self.tcx, scope_id); - let life_giver = LifeGiver::with_taken(taken.as_slice()); + let life_giver = LifeGiver::with_taken(taken[]); let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self, generics, same_regions, &life_giver); let (fn_decl, expl_self, generics) = 
rebuilder.rebuild(); @@ -933,7 +933,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime, &anon_nums, ®ion_names); - inputs = self.rebuild_args_ty(inputs.as_slice(), lifetime, + inputs = self.rebuild_args_ty(inputs[], lifetime, &anon_nums, ®ion_names); output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names); ty_params = self.rebuild_ty_params(ty_params, lifetime, @@ -968,7 +968,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { names.push(lt_name); } names.sort(); - let name = token::str_to_ident(names[0].as_slice()).name; + let name = token::str_to_ident(names[0][]).name; return (name_to_dummy_lifetime(name), Kept); } return (self.life_giver.give_lifetime(), Fresh); @@ -1219,7 +1219,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { .sess .fatal(format!( "unbound path {}", - pprust::path_to_string(path)).as_slice()) + pprust::path_to_string(path))[]) } Some(&d) => d }; @@ -1417,7 +1417,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { opt_explicit_self, generics); let msg = format!("consider using an explicit lifetime \ parameter as shown: {}", suggested_fn); - self.tcx.sess.span_help(span, msg.as_slice()); + self.tcx.sess.span_help(span, msg[]); } fn report_inference_failure(&self, @@ -1455,7 +1455,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { var_origin.span(), format!("cannot infer an appropriate lifetime{} \ due to conflicting requirements", - var_description).as_slice()); + var_description)[]); } fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { @@ -1500,7 +1500,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { self.tcx.sess.span_note( trace.origin.span(), format!("...so that {} ({})", - desc, values_str).as_slice()); + desc, values_str)[]); } None => { // Really should avoid printing this error at @@ -1509,7 +1509,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { // doing right now. 
- nmatsakis self.tcx.sess.span_note( trace.origin.span(), - format!("...so that {}", desc).as_slice()); + format!("...so that {}", desc)[]); } } } @@ -1526,7 +1526,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { "...so that closure can access `{}`", ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).as_slice()) + .to_string())[]) } infer::InfStackClosure(span) => { self.tcx.sess.span_note( @@ -1551,7 +1551,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { does not outlive the enclosing closure", ty::local_var_name_str( self.tcx, - id).get().to_string()).as_slice()); + id).get().to_string())[]); } infer::IndexSlice(span) => { self.tcx.sess.span_note( @@ -1595,7 +1595,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { span, format!("...so type `{}` of expression is valid during the \ expression", - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t))[]); } infer::BindingTypeIsNotValidAtDecl(span) => { self.tcx.sess.span_note( @@ -1607,14 +1607,14 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { span, format!("...so that the reference type `{}` \ does not outlive the data it points at", - self.ty_to_string(ty)).as_slice()); + self.ty_to_string(ty))[]); } infer::RelateParamBound(span, t) => { self.tcx.sess.span_note( span, format!("...so that the type `{}` \ will meet the declared lifetime bounds", - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t))[]); } infer::RelateDefaultParamBound(span, t) => { self.tcx.sess.span_note( @@ -1622,13 +1622,13 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { format!("...so that type parameter \ instantiated with `{}`, \ will meet its declared lifetime bounds", - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t))[]); } infer::RelateRegionParamBound(span) => { self.tcx.sess.span_note( span, format!("...so that the declared lifetime parameter bounds \ - are satisfied").as_slice()); + are satisfied")[]); } } } @@ -1677,7 +1677,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, Some(node) => match node { ast_map::NodeItem(item) => match item.node { ast::ItemFn(_, _, _, ref gen, _) => { - taken.push_all(gen.lifetimes.as_slice()); + taken.push_all(gen.lifetimes[]); None }, _ => None @@ -1685,7 +1685,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, ast_map::NodeImplItem(ii) => { match *ii { ast::MethodImplItem(ref m) => { - taken.push_all(m.pe_generics().lifetimes.as_slice()); + taken.push_all(m.pe_generics().lifetimes[]); Some(m.id) } ast::TypeImplItem(_) => None, @@ -1744,10 +1744,10 @@ impl LifeGiver { let mut lifetime; loop { let mut s = String::from_str("'"); - s.push_str(num_to_string(self.counter.get()).as_slice()); + s.push_str(num_to_string(self.counter.get())[]); if !self.taken.contains(&s) { lifetime = name_to_dummy_lifetime( - token::str_to_ident(s.as_slice()).name); + token::str_to_ident(s[]).name); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/middle/infer/freshen.rs b/src/librustc/middle/infer/freshen.rs index ebff854060cae..a8bf7546559fd 100644 --- a/src/librustc/middle/infer/freshen.rs +++ b/src/librustc/middle/infer/freshen.rs @@ -34,7 +34,7 @@ use middle::ty::{mod, Ty}; use middle::ty_fold; use middle::ty_fold::TypeFoldable; use middle::ty_fold::TypeFolder; -use std::collections::hash_map; +use std::collections::hash_map::{mod, Entry}; use super::InferCtxt; use super::unify::InferCtxtMethodsForSimplyUnifiableTypes; @@ -67,8 +67,8 @@ impl<'a, 'tcx> TypeFreshener<'a, 
'tcx> { } match self.freshen_map.entry(key) { - hash_map::Occupied(entry) => *entry.get(), - hash_map::Vacant(entry) => { + Entry::Occupied(entry) => *entry.get(), + Entry::Vacant(entry) => { let index = self.freshen_count; self.freshen_count += 1; let t = ty::mk_infer(self.infcx.tcx, freshener(index)); diff --git a/src/librustc/middle/infer/higher_ranked/mod.rs b/src/librustc/middle/infer/higher_ranked/mod.rs index ab0f98ec74a7f..2a19f37e7d415 100644 --- a/src/librustc/middle/infer/higher_ranked/mod.rs +++ b/src/librustc/middle/infer/higher_ranked/mod.rs @@ -189,7 +189,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C span, format!("region {} is not associated with \ any bound region from A!", - r0).as_slice()) + r0)[]) } } @@ -339,7 +339,7 @@ fn var_ids<'tcx, T: Combine<'tcx>>(combiner: &T, r => { combiner.infcx().tcx.sess.span_bug( combiner.trace().origin.span(), - format!("found non-region-vid: {}", r).as_slice()); + format!("found non-region-vid: {}", r)[]); } }).collect() } diff --git a/src/librustc/middle/infer/mod.rs b/src/librustc/middle/infer/mod.rs index 25eadae5b92f1..6d031c8650756 100644 --- a/src/librustc/middle/infer/mod.rs +++ b/src/librustc/middle/infer/mod.rs @@ -992,7 +992,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.tcx.sess.span_err(sp, format!("{}{}", mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty), - error_str).as_slice()); + error_str)[]); for err in err.iter() { ty::note_and_explain_type_err(self.tcx, *err) diff --git a/src/librustc/middle/infer/region_inference/graphviz.rs b/src/librustc/middle/infer/region_inference/graphviz.rs index 720de357a273d..0ca1a593ce7fe 100644 --- a/src/librustc/middle/infer/region_inference/graphviz.rs +++ b/src/librustc/middle/infer/region_inference/graphviz.rs @@ -26,7 +26,7 @@ use session::config; use util::nodemap::{FnvHashMap, FnvHashSet}; use util::ppaux::Repr; -use std::collections::hash_map::Vacant; +use std::collections::hash_map::Entry::Vacant; use std::io::{mod, File}; use std::os; use std::sync::atomic; @@ -60,7 +60,7 @@ pub fn maybe_print_constraints_for<'a, 'tcx>(region_vars: &RegionVarBindings<'a, } let requested_node : Option = - os::getenv("RUST_REGION_GRAPH_NODE").and_then(|s|from_str(s.as_slice())); + os::getenv("RUST_REGION_GRAPH_NODE").and_then(|s| s.parse()); if requested_node.is_some() && requested_node != Some(subject_node) { return; diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs index bcaf39cc8dbd1..661f7e56429ee 100644 --- a/src/librustc/middle/infer/region_inference/mod.rs +++ b/src/librustc/middle/infer/region_inference/mod.rs @@ -464,7 +464,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { origin.span(), format!("cannot relate bound region: {} <= {}", sub.repr(self.tcx), - sup.repr(self.tcx)).as_slice()); + sup.repr(self.tcx))[]); } (_, ReStatic) => { // all regions are subregions of static, so we can ignore this @@ -724,7 +724,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: LUB({}, {})", a.repr(self.tcx), - b.repr(self.tcx)).as_slice()); + b.repr(self.tcx))[]); } (ReStatic, _) | (_, ReStatic) => { @@ -741,7 +741,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("lub_concrete_regions invoked with \ non-concrete regions: {}, {}", a, - b).as_slice()); + b)[]); } (ReFree(ref fr), ReScope(s_id)) | @@ -824,7 +824,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: GLB({}, {})", a.repr(self.tcx), - 
b.repr(self.tcx)).as_slice()); + b.repr(self.tcx))[]); } (ReStatic, r) | (r, ReStatic) => { @@ -844,7 +844,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("glb_concrete_regions invoked with \ non-concrete regions: {}, {}", a, - b).as_slice()); + b)[]); } (ReFree(ref fr), ReScope(s_id)) | @@ -965,7 +965,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.expansion(var_data.as_mut_slice()); self.contraction(var_data.as_mut_slice()); let values = - self.extract_values_and_collect_conflicts(var_data.as_slice(), + self.extract_values_and_collect_conflicts(var_data[], errors); self.collect_concrete_region_errors(&values, errors); values @@ -1403,7 +1403,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { for var {}, lower_bounds={}, upper_bounds={}", node_idx, lower_bounds.repr(self.tcx), - upper_bounds.repr(self.tcx)).as_slice()); + upper_bounds.repr(self.tcx))[]); } fn collect_error_for_contracting_node( @@ -1447,7 +1447,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("collect_error_for_contracting_node() could not find error \ for var {}, upper_bounds={}", node_idx, - upper_bounds.repr(self.tcx)).as_slice()); + upper_bounds.repr(self.tcx))[]); } fn collect_concrete_regions(&self, diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index ea19111ce3d67..6acbc98b4b27b 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -74,7 +74,7 @@ struct IntrinsicCheckingVisitor<'a, 'tcx: 'a> { impl<'a, 'tcx> IntrinsicCheckingVisitor<'a, 'tcx> { fn def_id_is_transmute(&self, def_id: DefId) -> bool { let intrinsic = match ty::lookup_item_type(self.tcx, def_id).ty.sty { - ty::ty_bare_fn(ref bfty) => bfty.abi == RustIntrinsic, + ty::ty_bare_fn(_, ref bfty) => bfty.abi == RustIntrinsic, _ => return false }; if def_id.krate == ast::LOCAL_CRATE { @@ -123,7 +123,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for IntrinsicCheckingVisitor<'a, 'tcx> { DefFn(did, _) if self.def_id_is_transmute(did) => { let typ = ty::node_id_to_type(self.tcx, expr.id); match typ.sty { - ty_bare_fn(ref bare_fn_ty) if bare_fn_ty.abi == RustIntrinsic => { + ty_bare_fn(_, ref bare_fn_ty) if bare_fn_ty.abi == RustIntrinsic => { if let ty::FnConverging(to) = bare_fn_ty.sig.0.output { let from = bare_fn_ty.sig.0.inputs[0]; self.check_transmute(expr.span, from, to, expr.id); diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 2ffc5d8a510a3..967e7f070c5ae 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -65,7 +65,7 @@ impl LanguageItems { } } - pub fn items<'a>(&'a self) -> Enumerate>> { + pub fn items<'a>(&'a self) -> Enumerate>> { self.items.iter().enumerate() } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index b76d798941ef9..798daf8d5410b 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -323,7 +323,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { self.tcx .sess .span_bug(span, format!("no variable registered for id {}", - node_id).as_slice()); + node_id)[]); } } } @@ -594,7 +594,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.ir.tcx.sess.span_bug( span, format!("no live node registered for node {}", - node_id).as_slice()); + node_id)[]); } } } @@ -1129,7 +1129,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // Uninteresting cases: just propagate in rev exec order ast::ExprVec(ref exprs) => { - self.propagate_through_exprs(exprs.as_slice(), succ) + self.propagate_through_exprs(exprs[], succ) } ast::ExprRepeat(ref element, ref count) 
=> { @@ -1154,7 +1154,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } else { succ }; - let succ = self.propagate_through_exprs(args.as_slice(), succ); + let succ = self.propagate_through_exprs(args[], succ); self.propagate_through_expr(&**f, succ) } @@ -1167,11 +1167,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } else { succ }; - self.propagate_through_exprs(args.as_slice(), succ) + self.propagate_through_exprs(args[], succ) } ast::ExprTup(ref exprs) => { - self.propagate_through_exprs(exprs.as_slice(), succ) + self.propagate_through_exprs(exprs[], succ) } ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op) => { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index dce75579ca0a2..006515ea0a051 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -238,7 +238,7 @@ pub fn deref_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> deref_kind { None => { tcx.sess.bug( format!("deref_kind() invoked on non-derefable type {}", - ty_to_string(tcx, t)).as_slice()); + ty_to_string(tcx, t))[]); } } } @@ -441,8 +441,8 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { Some(adjustment) => { match *adjustment { - ty::AdjustAddEnv(..) => { - debug!("cat_expr(AdjustAddEnv): {}", + ty::AdjustAddEnv(..) | ty::AdjustReifyFnPointer(..) => { + debug!("cat_expr(AdjustAddEnv|AdjustReifyFnPointer): {}", expr.repr(self.tcx())); // Convert a bare fn to a closure by adding NULL env. // Result is an rvalue. @@ -635,7 +635,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { span, format!("Upvar of non-closure {} - {}", fn_node_id, - ty.repr(self.tcx())).as_slice()); + ty.repr(self.tcx()))[]); } } } @@ -917,7 +917,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { self.tcx().sess.span_bug( node.span(), format!("Explicit deref of non-derefable type: {}", - base_cmt.ty.repr(self.tcx())).as_slice()); + base_cmt.ty.repr(self.tcx()))[]); } } } @@ -996,7 +996,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { self.tcx().sess.span_bug( elt.span(), format!("Explicit index of non-index type `{}`", - base_cmt.ty.repr(self.tcx())).as_slice()); + base_cmt.ty.repr(self.tcx()))[]); } } } diff --git a/src/librustc/middle/pat_util.rs b/src/librustc/middle/pat_util.rs index 8ef8e091c9485..0a6c29d1cb65a 100644 --- a/src/librustc/middle/pat_util.rs +++ b/src/librustc/middle/pat_util.rs @@ -9,7 +9,6 @@ // except according to those terms. use middle::def::*; -use middle::resolve; use middle::ty; use util::nodemap::FnvHashMap; @@ -21,7 +20,7 @@ pub type PatIdMap = FnvHashMap; // This is used because same-named variables in alternative patterns need to // use the NodeId of their namesake in the first pattern. 
-pub fn pat_id_map(dm: &resolve::DefMap, pat: &ast::Pat) -> PatIdMap { +pub fn pat_id_map(dm: &DefMap, pat: &ast::Pat) -> PatIdMap { let mut map = FnvHashMap::new(); pat_bindings(dm, pat, |_bm, p_id, _s, path1| { map.insert(path1.node, p_id); @@ -29,7 +28,7 @@ pub fn pat_id_map(dm: &resolve::DefMap, pat: &ast::Pat) -> PatIdMap { map } -pub fn pat_is_refutable(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { +pub fn pat_is_refutable(dm: &DefMap, pat: &ast::Pat) -> bool { match pat.node { ast::PatLit(_) | ast::PatRange(_, _) => true, ast::PatEnum(_, _) | @@ -45,7 +44,7 @@ pub fn pat_is_refutable(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { } } -pub fn pat_is_variant_or_struct(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { +pub fn pat_is_variant_or_struct(dm: &DefMap, pat: &ast::Pat) -> bool { match pat.node { ast::PatEnum(_, _) | ast::PatIdent(_, _, None) | @@ -59,7 +58,7 @@ pub fn pat_is_variant_or_struct(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { } } -pub fn pat_is_const(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { +pub fn pat_is_const(dm: &DefMap, pat: &ast::Pat) -> bool { match pat.node { ast::PatIdent(_, _, None) | ast::PatEnum(..) => { match dm.borrow().get(&pat.id) { @@ -71,7 +70,7 @@ pub fn pat_is_const(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { } } -pub fn pat_is_binding(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { +pub fn pat_is_binding(dm: &DefMap, pat: &ast::Pat) -> bool { match pat.node { ast::PatIdent(..) => { !pat_is_variant_or_struct(dm, pat) && @@ -81,7 +80,7 @@ pub fn pat_is_binding(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { } } -pub fn pat_is_binding_or_wild(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { +pub fn pat_is_binding_or_wild(dm: &DefMap, pat: &ast::Pat) -> bool { match pat.node { ast::PatIdent(..) => pat_is_binding(dm, pat), ast::PatWild(_) => true, @@ -91,7 +90,7 @@ pub fn pat_is_binding_or_wild(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { /// Call `it` on every "binding" in a pattern, e.g., on `a` in /// `match foo() { Some(a) => (), None => () }` -pub fn pat_bindings(dm: &resolve::DefMap, pat: &ast::Pat, mut it: I) where +pub fn pat_bindings(dm: &DefMap, pat: &ast::Pat, mut it: I) where I: FnMut(ast::BindingMode, ast::NodeId, Span, &ast::SpannedIdent), { walk_pat(pat, |p| { @@ -107,7 +106,7 @@ pub fn pat_bindings(dm: &resolve::DefMap, pat: &ast::Pat, mut it: I) where /// Checks if the pattern contains any patterns that bind something to /// an ident, e.g. `foo`, or `Foo(foo)` or `foo @ Bar(..)`. -pub fn pat_contains_bindings(dm: &resolve::DefMap, pat: &ast::Pat) -> bool { +pub fn pat_contains_bindings(dm: &DefMap, pat: &ast::Pat) -> bool { let mut contains_bindings = false; walk_pat(pat, |p| { if pat_is_binding(dm, p) { diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 8cce1321d728b..6f63ae166fe41 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -11,17 +11,20 @@ //! A pass that checks to make sure private fields and methods aren't used //! outside their scopes. This pass will also generate a set of exported items //! which are available for use externally when compiled as a library. 
+pub use self::PrivateDep::*; +pub use self::ImportUse::*; +pub use self::LastPrivate::*; use self::PrivacyResult::*; use self::FieldName::*; use std::mem::replace; use metadata::csearch; -use middle::{def, resolve}; +use middle::def; use middle::ty::{mod, Ty}; use middle::ty::{MethodCall, MethodMap, MethodOrigin, MethodParam, MethodTypeParam}; use middle::ty::{MethodStatic, MethodStaticUnboxedClosure, MethodObject, MethodTraitObject}; -use util::nodemap::{NodeMap, NodeSet}; +use util::nodemap::{DefIdSet, NodeMap, NodeSet}; use syntax::{ast, ast_map}; use syntax::ast_util::{is_local, local_def, PostExpansionMethod}; @@ -29,16 +32,59 @@ use syntax::codemap::Span; use syntax::parse::token; use syntax::visit::{mod, Visitor}; -type Context<'a, 'tcx> = (&'a MethodMap<'tcx>, &'a resolve::ExportMap2); +type Context<'a, 'tcx> = (&'a MethodMap<'tcx>, &'a def::ExportMap); /// A set of AST nodes exported by the crate. pub type ExportedItems = NodeSet; +/// A set containing all exported definitions from external crates. +/// The set does not contain any entries from local crates. +pub type ExternalExports = DefIdSet; + /// A set of AST nodes that are fully public in the crate. This map is used for /// documentation purposes (reexporting a private struct inlines the doc, /// reexporting a public struct doesn't inline the doc). pub type PublicItems = NodeSet; +// FIXME: dox +pub type LastPrivateMap = NodeMap; + +#[deriving(Copy, Show)] +pub enum LastPrivate { + LastMod(PrivateDep), + // `use` directives (imports) can refer to two separate definitions in the + // type and value namespaces. We record here the last private node for each + // and whether the import is in fact used for each. + // If the Option fields are None, it means there is no definition + // in that namespace. + LastImport{value_priv: Option, + value_used: ImportUse, + type_priv: Option, + type_used: ImportUse}, +} + +#[deriving(Copy, Show)] +pub enum PrivateDep { + AllPublic, + DependsOn(ast::DefId), +} + +// How an import is used. +#[deriving(Copy, PartialEq, Show)] +pub enum ImportUse { + Unused, // The import is not used. + Used, // The import is used. +} + +impl LastPrivate { + pub fn or(self, other: LastPrivate) -> LastPrivate { + match (self, other) { + (me, LastMod(AllPublic)) => me, + (_, other) => other, + } + } +} + /// Result of a checking operation - None => no errors were found. Some => an /// error and contains the span and message for reporting that error and /// optionally the same for a note about the error. @@ -136,7 +182,7 @@ impl<'v> Visitor<'v> for ParentVisitor { struct EmbargoVisitor<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, - exp_map2: &'a resolve::ExportMap2, + export_map: &'a def::ExportMap, // This flag is an indicator of whether the previous item in the // hierarchical chain was exported or not. This is the indicator of whether @@ -342,8 +388,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { // This code is here instead of in visit_item so that the // crate module gets processed as well. 
if self.prev_exported { - assert!(self.exp_map2.contains_key(&id), "wut {}", id); - for export in self.exp_map2[id].iter() { + assert!(self.export_map.contains_key(&id), "wut {}", id); + for export in self.export_map[id].iter() { if is_local(export.def_id) { self.reexports.insert(export.def_id.node); } @@ -362,8 +408,8 @@ struct PrivacyVisitor<'a, 'tcx: 'a> { curitem: ast::NodeId, in_foreign: bool, parents: NodeMap, - external_exports: resolve::ExternalExports, - last_private_map: resolve::LastPrivateMap, + external_exports: ExternalExports, + last_private_map: LastPrivateMap, } enum PrivacyResult { @@ -569,10 +615,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { match result { None => true, Some((span, msg, note)) => { - self.tcx.sess.span_err(span, msg.as_slice()); + self.tcx.sess.span_err(span, msg[]); match note { Some((span, msg)) => { - self.tcx.sess.span_note(span, msg.as_slice()) + self.tcx.sess.span_note(span, msg[]) } None => {}, } @@ -674,7 +720,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { UnnamedField(idx) => format!("field #{} of {} is private", idx + 1, struct_desc), }; - self.tcx.sess.span_err(span, msg.as_slice()); + self.tcx.sess.span_err(span, msg[]); } // Given the ID of a method, checks to ensure it's in scope. @@ -696,7 +742,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { method_id, None, format!("method `{}`", - string).as_slice())); + string)[])); } // Checks that a path is in scope. @@ -713,31 +759,29 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { self.ensure_public(span, def, Some(origdid), - format!("{} `{}`", - tyname, - name).as_slice()) + format!("{} `{}`", tyname, name)[]) }; match self.last_private_map[path_id] { - resolve::LastMod(resolve::AllPublic) => {}, - resolve::LastMod(resolve::DependsOn(def)) => { + LastMod(AllPublic) => {}, + LastMod(DependsOn(def)) => { self.report_error(ck_public(def)); }, - resolve::LastImport{value_priv, - value_used: check_value, - type_priv, - type_used: check_type} => { + LastImport { value_priv, + value_used: check_value, + type_priv, + type_used: check_type } => { // This dance with found_error is because we don't want to report // a privacy error twice for the same directive. let found_error = match (type_priv, check_type) { - (Some(resolve::DependsOn(def)), resolve::Used) => { + (Some(DependsOn(def)), Used) => { !self.report_error(ck_public(def)) }, _ => false, }; if !found_error { match (value_priv, check_value) { - (Some(resolve::DependsOn(def)), resolve::Used) => { + (Some(DependsOn(def)), Used) => { self.report_error(ck_public(def)); }, _ => {}, @@ -749,24 +793,24 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { // be illegal. We only report one error, even if it is // illegal to import from both namespaces. 
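The comment above relies on the fact that a single import can resolve in the type namespace, the value namespace, or both, which is also why `LastImport` carries separate `value_priv`/`type_priv` fields. A minimal sketch of that situation (not part of the patch; `m`, `Unit` and `COUNT` are made-up names, written with the crate-relative `use` paths of this era):

use m::Unit;                         // one directive, two namespaces

mod m {
    pub struct Unit;                 // `Unit` is both a type and a value
    pub static COUNT: u32 = 1;       // a value-namespace-only definition
}

fn main() {
    let _x: Unit = Unit;             // the type use and the value use
    assert_eq!(m::COUNT, 1);
}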
match (value_priv, check_value, type_priv, check_type) { - (Some(p), resolve::Unused, None, _) | - (None, _, Some(p), resolve::Unused) => { + (Some(p), Unused, None, _) | + (None, _, Some(p), Unused) => { let p = match p { - resolve::AllPublic => None, - resolve::DependsOn(def) => ck_public(def), + AllPublic => None, + DependsOn(def) => ck_public(def), }; if p.is_some() { self.report_error(p); } }, - (Some(v), resolve::Unused, Some(t), resolve::Unused) => { + (Some(v), Unused, Some(t), Unused) => { let v = match v { - resolve::AllPublic => None, - resolve::DependsOn(def) => ck_public(def), + AllPublic => None, + DependsOn(def) => ck_public(def), }; let t = match t { - resolve::AllPublic => None, - resolve::DependsOn(def) => ck_public(def), + AllPublic => None, + DependsOn(def) => ck_public(def), }; if let (Some(_), Some(t)) = (v, t) { self.report_error(Some(t)); @@ -1261,13 +1305,13 @@ impl<'a, 'tcx> VisiblePrivateTypesVisitor<'a, 'tcx> { } fn check_ty_param_bound(&self, - span: Span, ty_param_bound: &ast::TyParamBound) { if let ast::TraitTyParamBound(ref trait_ref) = *ty_param_bound { if !self.tcx.sess.features.borrow().visible_private_types && self.path_is_private_type(trait_ref.trait_ref.ref_id) { + let span = trait_ref.trait_ref.path.span; self.tcx.sess.span_err(span, - "private type in exported type \ + "private trait in exported type \ parameter bound"); } } @@ -1311,7 +1355,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { } for bound in bounds.iter() { - self.check_ty_param_bound(item.span, bound) + self.check_ty_param_bound(bound) } } @@ -1449,16 +1493,17 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { fn visit_generics(&mut self, generics: &ast::Generics) { for ty_param in generics.ty_params.iter() { for bound in ty_param.bounds.iter() { - self.check_ty_param_bound(ty_param.span, bound) + self.check_ty_param_bound(bound) } } for predicate in generics.where_clause.predicates.iter() { match predicate { &ast::WherePredicate::BoundPredicate(ref bound_pred) => { for bound in bound_pred.bounds.iter() { - self.check_ty_param_bound(bound_pred.span, bound) + self.check_ty_param_bound(bound) } } + &ast::WherePredicate::RegionPredicate(_) => {} &ast::WherePredicate::EqPredicate(ref eq_pred) => { self.visit_ty(&*eq_pred.ty); } @@ -1520,9 +1565,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { } pub fn check_crate(tcx: &ty::ctxt, - exp_map2: &resolve::ExportMap2, - external_exports: resolve::ExternalExports, - last_private_map: resolve::LastPrivateMap) + export_map: &def::ExportMap, + external_exports: ExternalExports, + last_private_map: LastPrivateMap) -> (ExportedItems, PublicItems) { let krate = tcx.map.krate(); @@ -1561,7 +1606,7 @@ pub fn check_crate(tcx: &ty::ctxt, exported_items: NodeSet::new(), public_items: NodeSet::new(), reexports: NodeSet::new(), - exp_map2: exp_map2, + export_map: export_map, prev_exported: true, prev_public: true, }; diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 38d3b859c9d22..4d83075480bcc 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -50,7 +50,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool { // monomorphized or it was marked with `#[inline]`. This will only return // true for functions. 
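For reference, the two situations the inlining comment above describes, written as ordinary user code (illustration only; `add_one` and `pick` are made-up names):

// Cross-crate inlining candidate because of the attribute.
#[inline]
pub fn add_one(x: u32) -> u32 { x + 1 }

// A generic function is a candidate regardless of #[inline]: it has to be
// monomorphized in whichever crate instantiates it, so its body must be kept.
pub fn pick<T>(flag: bool, a: T, b: T) -> T {
    if flag { a } else { b }
}

fn main() {
    assert_eq!(add_one(1), 2);
    assert_eq!(pick(true, 3i32, 4), 3);
}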
fn item_might_be_inlined(item: &ast::Item) -> bool { - if attributes_specify_inlining(item.attrs.as_slice()) { + if attributes_specify_inlining(item.attrs[]) { return true } @@ -65,7 +65,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool { fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method, impl_src: ast::DefId) -> bool { - if attributes_specify_inlining(method.attrs.as_slice()) || + if attributes_specify_inlining(method.attrs[]) || generics_require_inlining(method.pe_generics()) { return true } @@ -202,7 +202,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { ast::MethodImplItem(ref method) => { if generics_require_inlining(method.pe_generics()) || attributes_specify_inlining( - method.attrs.as_slice()) { + method.attrs[]) { true } else { let impl_did = self.tcx @@ -249,7 +249,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { None => { self.tcx.sess.bug(format!("found unmapped ID in worklist: \ {}", - search_item).as_slice()) + search_item)[]) } } } @@ -341,7 +341,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { .bug(format!("found unexpected thingy in worklist: {}", self.tcx .map - .node_to_string(search_item)).as_slice()) + .node_to_string(search_item))[]) } } } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index e0d5a3a50e612..8df78281cc227 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -36,7 +36,8 @@ use syntax::visit::{Visitor, FnKind}; /// placate the same deriving in `ty::FreeRegion`, but we may want to /// actually attach a more meaningful ordering to scopes than the one /// generated via deriving here. -#[deriving(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Encodable, Decodable, Show)] +#[deriving(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, + RustcDecodable, Show, Copy)] pub enum CodeExtent { Misc(ast::NodeId) } diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 2202137d14936..28cb80df7713b 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -19,9 +19,8 @@ pub use self::DefRegion::*; use self::ScopeChain::*; use session::Session; -use middle::def; +use middle::def::{mod, DefMap}; use middle::region; -use middle::resolve::DefMap; use middle::subst; use middle::ty; use std::fmt; @@ -34,7 +33,7 @@ use syntax::visit; use syntax::visit::Visitor; use util::nodemap::NodeMap; -#[deriving(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Show)] +#[deriving(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show)] pub enum DefRegion { DefStaticRegion, DefEarlyBoundRegion(/* space */ subst::ParamSpace, @@ -207,13 +206,21 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> { } for predicate in generics.where_clause.predicates.iter() { match predicate { - &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ ident, + &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ ref bounded_ty, ref bounds, - span, .. }) => { - self.visit_ident(span, ident); + self.visit_ty(&**bounded_ty); visit::walk_ty_param_bounds_helper(self, bounds); } + &ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime, + ref bounds, + .. 
}) => { + + self.visit_lifetime_ref(lifetime); + for bound in bounds.iter() { + self.visit_lifetime_ref(bound); + } + } &ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{ id, ref path, ref ty, @@ -402,7 +409,7 @@ impl<'a> LifetimeContext<'a> { self.sess.span_err( lifetime_ref.span, format!("use of undeclared lifetime name `{}`", - token::get_name(lifetime_ref.name)).as_slice()); + token::get_name(lifetime_ref.name))[]); } fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec) { @@ -416,7 +423,7 @@ impl<'a> LifetimeContext<'a> { lifetime.lifetime.span, format!("illegal lifetime parameter name: `{}`", token::get_name(lifetime.lifetime.name)) - .as_slice()); + []); } } @@ -430,7 +437,7 @@ impl<'a> LifetimeContext<'a> { format!("lifetime name `{}` declared twice in \ the same scope", token::get_name(lifetime_j.lifetime.name)) - .as_slice()); + []); } } @@ -546,9 +553,21 @@ fn early_bound_lifetime_names(generics: &ast::Generics) -> Vec { } for predicate in generics.where_clause.predicates.iter() { match predicate { - &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounds, ..}) => { + &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounds, + ref bounded_ty, + ..}) => { + collector.visit_ty(&**bounded_ty); visit::walk_ty_param_bounds_helper(&mut collector, bounds); } + &ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime, + ref bounds, + ..}) => { + collector.visit_lifetime_ref(lifetime); + + for bound in bounds.iter() { + collector.visit_lifetime_ref(bound); + } + } &ast::WherePredicate::EqPredicate(_) => unimplemented!() } } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index ca8029fdfca3b..d793f49efe5e8 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -43,7 +43,8 @@ struct Annotator { impl Annotator { // Determine the stability for a node based on its attributes and inherited // stability. The stability is recorded in the index and used as the parent. - fn annotate(&mut self, id: NodeId, attrs: &Vec, f: F) where + fn annotate(&mut self, id: NodeId, use_parent: bool, + attrs: &Vec, f: F) where F: FnOnce(&mut Annotator), { match attr::find_stability(attrs.as_slice()) { @@ -60,7 +61,9 @@ impl Annotator { } } None => { - self.parent.clone().map(|stab| self.index.local.insert(id, stab)); + if use_parent { + self.parent.clone().map(|stab| self.index.local.insert(id, stab)); + } f(self); } } @@ -69,11 +72,24 @@ impl Annotator { impl<'v> Visitor<'v> for Annotator { fn visit_item(&mut self, i: &Item) { - self.annotate(i.id, &i.attrs, |v| visit::walk_item(v, i)); + // FIXME (#18969): the following is a hack around the fact + // that we cannot currently annotate the stability of + // `deriving`. Basically, we do *not* allow stability + // inheritance on trait implementations, so that derived + // implementations appear to be unannotated. This then allows + // derived implementations to be automatically tagged with the + // stability of the trait. This is WRONG, but expedient to get + // libstd stabilized for the 1.0 release. 
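To make the work-around concrete: a derived implementation is expanded code and can never carry a stability attribute of its own, so skipping parent inheritance for trait impls leaves it unannotated and lets the trait-stability fallback added later in this patch apply. Sketch only (made-up `Point` type, using the pre-1.0 `deriving` syntax of this codebase):

#[deriving(Clone)]
pub struct Point { pub x: i32, pub y: i32 }
// Expands to roughly `impl Clone for Point { ... }` with no attributes, so
// with this change the impl stays unannotated and is later reported with the
// stability of the `Clone` trait itself.

fn main() {
    let p = Point { x: 1, y: 2 };
    let q = p.clone();
    assert_eq!((q.x, q.y), (1, 2));
}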
+ let use_parent = match i.node { + ast::ItemImpl(_, _, Some(_), _, _) => false, + _ => true, + }; + + self.annotate(i.id, use_parent, &i.attrs, |v| visit::walk_item(v, i)); if let ast::ItemStruct(ref sd, _) = i.node { sd.ctor_id.map(|id| { - self.annotate(id, &i.attrs, |_| {}) + self.annotate(id, true, &i.attrs, |_| {}) }); } } @@ -82,7 +98,7 @@ impl<'v> Visitor<'v> for Annotator { _: &'v Block, _: Span, _: NodeId) { if let FkMethod(_, _, meth) = fk { // Methods are not already annotated, so we annotate it - self.annotate(meth.id, &meth.attrs, |_| {}); + self.annotate(meth.id, true, &meth.attrs, |_| {}); } // Items defined in a function body have no reason to have // a stability attribute, so we don't recurse. @@ -101,15 +117,21 @@ impl<'v> Visitor<'v> for Annotator { TypeTraitItem(ref typedef) => (typedef.ty_param.id, &typedef.attrs), }; - self.annotate(id, attrs, |v| visit::walk_trait_item(v, t)); + self.annotate(id, true, attrs, |v| visit::walk_trait_item(v, t)); } fn visit_variant(&mut self, var: &Variant, g: &'v Generics) { - self.annotate(var.node.id, &var.node.attrs, |v| visit::walk_variant(v, var, g)) + self.annotate(var.node.id, true, &var.node.attrs, + |v| visit::walk_variant(v, var, g)) } fn visit_struct_field(&mut self, s: &StructField) { - self.annotate(s.node.id, &s.node.attrs, |v| visit::walk_struct_field(v, s)); + self.annotate(s.node.id, true, &s.node.attrs, + |v| visit::walk_struct_field(v, s)); + } + + fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { + self.annotate(i.id, true, &i.attrs, |_| {}); } } @@ -123,7 +145,8 @@ impl Index { }, parent: None }; - annotator.annotate(ast::CRATE_NODE_ID, &krate.attrs, |v| visit::walk_crate(v, krate)); + annotator.annotate(ast::CRATE_NODE_ID, true, &krate.attrs, + |v| visit::walk_crate(v, krate)); annotator.index } } @@ -135,16 +158,29 @@ pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option { match ty::trait_item_of_item(tcx, id) { Some(ty::MethodTraitItemId(trait_method_id)) if trait_method_id != id => { - lookup(tcx, trait_method_id) - } - _ if is_local(id) => { - tcx.stability.borrow().local.get(&id.node).cloned() - } - _ => { - let stab = csearch::get_stability(&tcx.sess.cstore, id); - let mut index = tcx.stability.borrow_mut(); - (*index).extern_cache.insert(id, stab.clone()); - stab + return lookup(tcx, trait_method_id) } + _ => {} } + + let item_stab = if is_local(id) { + tcx.stability.borrow().local.get(&id.node).cloned() + } else { + let stab = csearch::get_stability(&tcx.sess.cstore, id); + let mut index = tcx.stability.borrow_mut(); + (*index).extern_cache.insert(id, stab.clone()); + stab + }; + + item_stab.or_else(|| { + if let Some(trait_id) = ty::trait_id_of_impl(tcx, id) { + // FIXME (#18969): for the time being, simply use the + // stability of the trait to determine the stability of any + // unmarked impls for it. See FIXME above for more details. 
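The fallback here hinges on `Option::or_else`: the item's own stability, when present, always wins, and the trait lookup only runs for unmarked impls. A reminder of those semantics (illustration only, not part of the patch):

fn main() {
    let unmarked: Option<&str> = None;
    assert_eq!(unmarked.or_else(|| Some("trait stability")),
               Some("trait stability"));

    let marked = Some("item stability");
    assert_eq!(marked.or_else(|| Some("trait stability")),
               Some("item stability"));
}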
+ + lookup(tcx, trait_id) + } else { + None + } + }) } diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs index 30a47ff913258..6ae639e0313e0 100644 --- a/src/librustc/middle/subst.rs +++ b/src/librustc/middle/subst.rs @@ -18,7 +18,7 @@ use middle::ty_fold::{mod, TypeFoldable, TypeFolder}; use util::ppaux::Repr; use std::fmt; -use std::slice::Items; +use std::slice::Iter; use std::vec::Vec; use syntax::codemap::{Span, DUMMY_SP}; @@ -187,8 +187,8 @@ impl RegionSubsts { /////////////////////////////////////////////////////////////////////////// // ParamSpace -#[deriving(Copy, PartialOrd, Ord, PartialEq, Eq, - Clone, Hash, Encodable, Decodable, Show)] +#[deriving(PartialOrd, Ord, PartialEq, Eq, Copy, + Clone, Hash, RustcEncodable, RustcDecodable, Show)] pub enum ParamSpace { TypeSpace, // Type parameters attached to a type definition, trait, or impl SelfSpace, // Self parameter on a trait @@ -224,7 +224,7 @@ impl ParamSpace { /// Vector of things sorted by param space. Used to keep /// the set of things declared on the type, self, or method /// distinct. -#[deriving(PartialEq, Eq, Clone, Hash, Encodable, Decodable)] +#[deriving(PartialEq, Eq, Clone, Hash, RustcEncodable, RustcDecodable)] pub struct VecPerParamSpace { // This was originally represented as a tuple with one Vec for // each variant of ParamSpace, and that remains the abstraction @@ -400,7 +400,7 @@ impl VecPerParamSpace { &self.get_slice(space)[index] } - pub fn iter<'a>(&'a self) -> Items<'a,T> { + pub fn iter<'a>(&'a self) -> Iter<'a,T> { self.content.iter() } @@ -620,7 +620,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { (space={}, index={})", region_name.as_str(), self.root_ty.repr(self.tcx()), - space, i).as_slice()); + space, i)[]); } } } @@ -677,7 +677,7 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> { p.space, p.idx, self.root_ty.repr(self.tcx()), - self.substs.repr(self.tcx())).as_slice()); + self.substs.repr(self.tcx()))[]); } }; diff --git a/src/librustc/middle/traits/coherence.rs b/src/librustc/middle/traits/coherence.rs index 9804f6d222afd..d48685ce27d89 100644 --- a/src/librustc/middle/traits/coherence.rs +++ b/src/librustc/middle/traits/coherence.rs @@ -154,7 +154,7 @@ pub fn ty_is_local<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool { ty::ty_err => { tcx.sess.bug( format!("ty_is_local invoked on unexpected type: {}", - ty.repr(tcx)).as_slice()) + ty.repr(tcx))[]) } } } diff --git a/src/librustc/middle/traits/fulfill.rs b/src/librustc/middle/traits/fulfill.rs index 213d97b4b344a..72e4eb5d1d634 100644 --- a/src/librustc/middle/traits/fulfill.rs +++ b/src/librustc/middle/traits/fulfill.rs @@ -12,7 +12,7 @@ use middle::infer::InferCtxt; use middle::mem_categorization::Typer; use middle::ty::{mod, Ty}; use std::collections::HashSet; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::default::Default; use std::rc::Rc; use syntax::ast; diff --git a/src/librustc/middle/traits/mod.rs b/src/librustc/middle/traits/mod.rs index 3289acd0c2e5e..8028971a46346 100644 --- a/src/librustc/middle/traits/mod.rs +++ b/src/librustc/middle/traits/mod.rs @@ -19,7 +19,7 @@ use middle::subst; use middle::ty::{mod, Ty}; use middle::infer::InferCtxt; use std::rc::Rc; -use std::slice::Items; +use std::slice::Iter; use syntax::ast; use syntax::codemap::{Span, DUMMY_SP}; @@ -304,7 +304,7 @@ impl<'tcx> ObligationCause<'tcx> { } impl<'tcx, N> Vtable<'tcx, N> { - pub fn iter_nested(&self) -> Items { + pub fn iter_nested(&self) -> Iter { match 
*self { VtableImpl(ref i) => i.iter_nested(), VtableFnPointer(..) => (&[]).iter(), @@ -338,7 +338,7 @@ impl<'tcx, N> Vtable<'tcx, N> { } impl<'tcx, N> VtableImplData<'tcx, N> { - pub fn iter_nested(&self) -> Items { + pub fn iter_nested(&self) -> Iter { self.nested.iter() } @@ -365,7 +365,7 @@ impl<'tcx, N> VtableImplData<'tcx, N> { } impl VtableBuiltinData { - pub fn iter_nested(&self) -> Items { + pub fn iter_nested(&self) -> Iter { self.nested.iter() } diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index 8ba28b61006bd..f849f4c9b937e 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -709,7 +709,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let all_bounds = util::transitive_bounds( - self.tcx(), caller_trait_refs.as_slice()); + self.tcx(), caller_trait_refs[]); let matching_bounds = all_bounds.filter( @@ -762,7 +762,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("No entry for unboxed closure: {}", - closure_def_id.repr(self.tcx())).as_slice()); + closure_def_id.repr(self.tcx()))[]); } }; @@ -795,7 +795,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } // provide an impl, but only for suitable `fn` pointers - ty::ty_bare_fn(ty::BareFnTy { + ty::ty_bare_fn(_, ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: abi::Rust, sig: ty::Binder(ty::FnSig { @@ -984,7 +984,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ty::ty_int(_) | ty::ty_bool | ty::ty_float(_) | - ty::ty_bare_fn(_) | + ty::ty_bare_fn(..) | ty::ty_char => { // safe for everything Ok(If(Vec::new())) @@ -1281,7 +1281,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.bug( format!( "asked to assemble builtin bounds of unexpected type: {}", - self_ty.repr(self.tcx())).as_slice()); + self_ty.repr(self.tcx()))[]); } }; @@ -1436,7 +1436,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("builtin bound for {} was ambig", - obligation.repr(self.tcx())).as_slice()); + obligation.repr(self.tcx()))[]); } } } @@ -1543,7 +1543,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let self_ty = self.infcx.shallow_resolve(obligation.self_ty()); let sig = match self_ty.sty { - ty::ty_bare_fn(ty::BareFnTy { + ty::ty_bare_fn(_, ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: abi::Rust, ref sig @@ -1554,7 +1554,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("Fn pointer candidate for inappropriate self type: {}", - self_ty.repr(self.tcx())).as_slice()); + self_ty.repr(self.tcx()))[]); } }; @@ -1595,7 +1595,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("No entry for unboxed closure: {}", - closure_def_id.repr(self.tcx())).as_slice()); + closure_def_id.repr(self.tcx()))[]); } }; @@ -1692,8 +1692,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.bug( format!("Impl {} was matchable against {} but now is not", impl_def_id.repr(self.tcx()), - obligation.repr(self.tcx())) - .as_slice()); + obligation.repr(self.tcx()))[]); } } } diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index acf1fced72cae..22fdea8afb59b 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -46,13 +46,12 @@ use lint; use metadata::csearch; use middle; use middle::const_eval; -use middle::def; +use middle::def::{mod, DefMap, ExportMap}; use middle::dependency_format; use 
middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem}; use middle::lang_items::{FnOnceTraitLangItem, TyDescStructLangItem}; use middle::mem_categorization as mc; use middle::region; -use middle::resolve; use middle::resolve_lifetime; use middle::infer; use middle::stability; @@ -78,7 +77,8 @@ use std::mem; use std::ops; use std::rc::Rc; use collections::enum_set::{EnumSet, CLike}; -use std::collections::hash_map::{HashMap, Occupied, Vacant}; +use std::collections::hash_map::HashMap; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::abi; use syntax::ast::{CrateNum, DefId, DUMMY_NODE_ID, Ident, ItemTrait, LOCAL_CRATE}; use syntax::ast::{MutImmutable, MutMutable, Name, NamedField, NodeId}; @@ -99,7 +99,7 @@ pub const INITIAL_DISCRIMINANT_VALUE: Disr = 0; /// The complete set of all analyses described in this module. This is /// produced by the driver and fed to trans and later passes. pub struct CrateAnalysis<'tcx> { - pub exp_map2: middle::resolve::ExportMap2, + pub export_map: ExportMap, pub exported_items: middle::privacy::ExportedItems, pub public_items: middle::privacy::PublicItems, pub ty_cx: ty::ctxt<'tcx>, @@ -246,7 +246,7 @@ pub struct mt<'tcx> { pub mutbl: ast::Mutability, } -#[deriving(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Show)] +#[deriving(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show)] pub enum TraitStore { /// Box UniqTraitStore, @@ -277,13 +277,13 @@ pub enum ast_ty_to_ty_cache_entry<'tcx> { atttce_resolved(Ty<'tcx>) /* resolved to a type, irrespective of region */ } -#[deriving(Clone, PartialEq, Decodable, Encodable)] +#[deriving(Clone, PartialEq, RustcDecodable, RustcEncodable)] pub struct ItemVariances { pub types: VecPerParamSpace, pub regions: VecPerParamSpace, } -#[deriving(Clone, Copy, PartialEq, Decodable, Encodable, Show)] +#[deriving(Clone, PartialEq, RustcDecodable, RustcEncodable, Show, Copy)] pub enum Variance { Covariant, // T <: T iff A <: B -- e.g., function return type Invariant, // T <: T iff B == A -- e.g., type of mutable cell @@ -293,7 +293,8 @@ pub enum Variance { #[deriving(Clone, Show)] pub enum AutoAdjustment<'tcx> { - AdjustAddEnv(ty::TraitStore), + AdjustAddEnv(ast::DefId, ty::TraitStore), + AdjustReifyFnPointer(ast::DefId), // go from a fn-item type to a fn-pointer type AdjustDerefRef(AutoDerefRef<'tcx>) } @@ -430,7 +431,7 @@ pub fn type_of_adjust<'tcx>(cx: &ctxt<'tcx>, adj: &AutoAdjustment<'tcx>) -> Opti } } -#[deriving(Clone, Copy, Encodable, Decodable, PartialEq, PartialOrd, Show)] +#[deriving(Clone, Copy, RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Show)] pub struct param_index { pub space: subst::ParamSpace, pub index: uint @@ -510,7 +511,7 @@ pub struct MethodCall { pub adjustment: ExprAdjustment } -#[deriving(Clone, Copy, PartialEq, Eq, Hash, Show, Encodable, Decodable)] +#[deriving(Clone, PartialEq, Eq, Hash, Show, RustcEncodable, RustcDecodable, Copy)] pub enum ExprAdjustment { NoAdjustment, AutoDeref(uint), @@ -615,7 +616,7 @@ pub struct ctxt<'tcx> { // queried from a HashSet. interner: RefCell, Ty<'tcx>>>, pub sess: Session, - pub def_map: resolve::DefMap, + pub def_map: DefMap, pub named_region_map: resolve_lifetime::NamedRegionMap, @@ -973,7 +974,7 @@ pub struct ParamTy { /// is the outer fn. 
/// /// [dbi]: http://en.wikipedia.org/wiki/De_Bruijn_index -#[deriving(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Show)] +#[deriving(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show, Copy)] pub struct DebruijnIndex { // We maintain the invariant that this is never 0. So 1 indicates // the innermost binder. To ensure this, create with `DebruijnIndex::new`. @@ -981,7 +982,7 @@ pub struct DebruijnIndex { } /// Representation of regions: -#[deriving(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Show)] +#[deriving(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show, Copy)] pub enum Region { // Region bound in a type or fn declaration which will be // substituted 'early' -- that is, at the same time when type @@ -1028,7 +1029,7 @@ pub struct UpvarId { pub closure_expr_id: ast::NodeId, } -#[deriving(Clone, Copy, PartialEq, Eq, Hash, Show, Encodable, Decodable)] +#[deriving(Clone, PartialEq, Eq, Hash, Show, RustcEncodable, RustcDecodable, Copy)] pub enum BorrowKind { /// Data must be immutable and is aliasable. ImmBorrow, @@ -1121,7 +1122,7 @@ pub enum BorrowKind { /// - Through mutation, the borrowed upvars can actually escape /// the closure, so sometimes it is necessary for them to be larger /// than the closure lifetime itself. -#[deriving(Copy, PartialEq, Clone, Encodable, Decodable, Show)] +#[deriving(PartialEq, Clone, RustcEncodable, RustcDecodable, Show, Copy)] pub struct UpvarBorrow { pub kind: BorrowKind, pub region: ty::Region, @@ -1146,7 +1147,8 @@ impl Region { } } -#[deriving(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Encodable, Decodable, Show)] +#[deriving(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, + RustcEncodable, RustcDecodable, Show, Copy)] /// A "free" region `fr` can be interpreted as "some region /// at least as big as the scope `fr.scope`". pub struct FreeRegion { @@ -1154,7 +1156,8 @@ pub struct FreeRegion { pub bound_region: BoundRegion } -#[deriving(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Encodable, Decodable, Show)] +#[deriving(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, + RustcEncodable, RustcDecodable, Show, Copy)] pub enum BoundRegion { /// An anonymous region parameter for a given fn (&T) BrAnon(uint), @@ -1243,11 +1246,17 @@ pub enum sty<'tcx> { ty_vec(Ty<'tcx>, Option), // Second field is length. ty_ptr(mt<'tcx>), ty_rptr(Region, mt<'tcx>), - ty_bare_fn(BareFnTy<'tcx>), + + // If the def-id is Some(_), then this is the type of a specific + // fn item. Otherwise, if None(_), it a fn pointer type. 
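A user-level view of the Some/None distinction described in the comment above (illustration, not part of the patch; `double` and `triple` are made-up names): each named function gets its own fn-item type carrying its def-id, and annotating a binding with an explicit fn-pointer type triggers the new `AdjustReifyFnPointer` coercion, after which distinct functions share one pointer type.

fn double(x: u32) -> u32 { x * 2 }
fn triple(x: u32) -> u32 { x * 3 }

fn main() {
    // `double` on the right has the fn-item type (def-id is Some(..)); the
    // annotated binding reifies it to the plain fn-pointer type (def-id None).
    let mut f: fn(u32) -> u32 = double;
    assert_eq!(f(10), 20);

    f = triple;                      // fine: both reify to the same pointer type
    assert_eq!(f(10), 30);
}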
+ ty_bare_fn(Option, BareFnTy<'tcx>), + ty_closure(Box>), ty_trait(Box>), ty_struct(DefId, Substs<'tcx>), + ty_unboxed_closure(DefId, Region, Substs<'tcx>), + ty_tup(Vec>), ty_param(ParamTy), // type parameter @@ -1412,7 +1421,8 @@ pub struct ExistentialBounds { pub type BuiltinBounds = EnumSet; -#[deriving(Copy, Clone, Encodable, PartialEq, Eq, Decodable, Hash, Show)] +#[deriving(Clone, RustcEncodable, PartialEq, Eq, RustcDecodable, Hash, + Show, Copy)] #[repr(uint)] pub enum BuiltinBound { BoundSend, @@ -1463,7 +1473,7 @@ pub struct FloatVid { pub index: uint } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct RegionVid { pub index: uint } @@ -1485,7 +1495,7 @@ pub enum InferTy { FreshIntTy(uint), } -#[deriving(Clone, Copy, Encodable, Decodable, Eq, Hash, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, Eq, Hash, Show, Copy)] pub enum InferRegion { ReVar(RegionVid), ReSkolemized(uint, BoundRegion) @@ -1571,7 +1581,7 @@ pub struct TypeParameterDef<'tcx> { pub default: Option>, } -#[deriving(Encodable, Decodable, Clone, Show)] +#[deriving(RustcEncodable, RustcDecodable, Clone, Show)] pub struct RegionParameterDef { pub name: ast::Name, pub def_id: ast::DefId, @@ -1891,7 +1901,7 @@ impl<'tcx> ParameterEnvironment<'tcx> { _ => { cx.sess.bug(format!("ParameterEnvironment::from_item(): \ `{}` is not an item", - cx.map.node_to_string(id)).as_slice()) + cx.map.node_to_string(id))[]) } } } @@ -1960,14 +1970,14 @@ impl UnboxedClosureKind { }; match result { Ok(trait_did) => trait_did, - Err(err) => cx.sess.fatal(err.as_slice()), + Err(err) => cx.sess.fatal(err[]), } } } pub fn mk_ctxt<'tcx>(s: Session, type_arena: &'tcx TypedArena>, - dm: resolve::DefMap, + dm: DefMap, named_region_map: resolve_lifetime::NamedRegionMap, map: ast_map::Map<'tcx>, freevars: RefCell, @@ -2178,7 +2188,7 @@ impl FlagComputation { self.add_tys(ts[]); } - &ty_bare_fn(ref f) => { + &ty_bare_fn(_, ref f) => { self.add_fn_sig(&f.sig); } @@ -2339,15 +2349,19 @@ pub fn mk_closure<'tcx>(cx: &ctxt<'tcx>, fty: ClosureTy<'tcx>) -> Ty<'tcx> { mk_t(cx, ty_closure(box fty)) } -pub fn mk_bare_fn<'tcx>(cx: &ctxt<'tcx>, fty: BareFnTy<'tcx>) -> Ty<'tcx> { - mk_t(cx, ty_bare_fn(fty)) +pub fn mk_bare_fn<'tcx>(cx: &ctxt<'tcx>, + opt_def_id: Option, + fty: BareFnTy<'tcx>) -> Ty<'tcx> { + mk_t(cx, ty_bare_fn(opt_def_id, fty)) } pub fn mk_ctor_fn<'tcx>(cx: &ctxt<'tcx>, + def_id: ast::DefId, input_tys: &[Ty<'tcx>], output: Ty<'tcx>) -> Ty<'tcx> { let input_args = input_tys.iter().map(|ty| *ty).collect(); mk_bare_fn(cx, + Some(def_id), BareFnTy { unsafety: ast::Unsafety::Normal, abi: abi::Rust, @@ -2446,7 +2460,7 @@ pub fn maybe_walk_ty<'tcx>(ty: Ty<'tcx>, f: |Ty<'tcx>| -> bool) { } } ty_tup(ref ts) => { for tt in ts.iter() { maybe_walk_ty(*tt, |x| f(x)); } } - ty_bare_fn(ref ft) => { + ty_bare_fn(_, ref ft) => { for a in ft.sig.0.inputs.iter() { maybe_walk_ty(*a, |x| f(x)); } if let ty::FnConverging(output) = ft.sig.0.output { maybe_walk_ty(output, f); @@ -2596,7 +2610,7 @@ pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { ty_str => mk_mach_uint(ast::TyU8), ty_open(ty) => sequence_element_type(cx, ty), _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}", - ty_to_string(cx, ty)).as_slice()), + ty_to_string(cx, ty))[]), } } @@ -2852,45 +2866,18 @@ impl TypeContents { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl ops::BitOr for TypeContents { - 
fn bitor(&self, other: &TypeContents) -> TypeContents { - TypeContents {bits: self.bits | other.bits} - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl ops::BitOr for TypeContents { fn bitor(self, other: TypeContents) -> TypeContents { TypeContents {bits: self.bits | other.bits} } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl ops::BitAnd for TypeContents { - fn bitand(&self, other: &TypeContents) -> TypeContents { - TypeContents {bits: self.bits & other.bits} - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl ops::BitAnd for TypeContents { fn bitand(self, other: TypeContents) -> TypeContents { TypeContents {bits: self.bits & other.bits} } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl ops::Sub for TypeContents { - fn sub(&self, other: &TypeContents) -> TypeContents { - TypeContents {bits: self.bits & !other.bits} - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl ops::Sub for TypeContents { fn sub(self, other: TypeContents) -> TypeContents { TypeContents {bits: self.bits & !other.bits} @@ -2956,7 +2943,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { // Scalar and unique types are sendable, and durable ty_infer(ty::FreshIntTy(_)) | ty_bool | ty_int(_) | ty_uint(_) | ty_float(_) | - ty_bare_fn(_) | ty::ty_char => { + ty_bare_fn(..) | ty::ty_char => { TC::None } @@ -2999,7 +2986,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { ty_struct(did, ref substs) => { let flds = struct_fields(cx, did, substs); let mut res = - TypeContents::union(flds.as_slice(), + TypeContents::union(flds[], |f| tc_mt(cx, f.mt, cache)); if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) { @@ -3016,21 +3003,21 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { // FIXME(#14449): `borrowed_contents` below assumes `&mut` // unboxed closure. let upvars = unboxed_closure_upvars(cx, did, substs); - TypeContents::union(upvars.as_slice(), + TypeContents::union(upvars[], |f| tc_ty(cx, f.ty, cache)) | borrowed_contents(r, MutMutable) } ty_tup(ref tys) => { - TypeContents::union(tys.as_slice(), + TypeContents::union(tys[], |ty| tc_ty(cx, *ty, cache)) } ty_enum(did, ref substs) => { let variants = substd_enum_variants(cx, did, substs); let mut res = - TypeContents::union(variants.as_slice(), |variant| { - TypeContents::union(variant.args.as_slice(), + TypeContents::union(variants[], |variant| { + TypeContents::union(variant.args[], |arg_ty| { tc_ty(cx, *arg_ty, cache) }) @@ -3095,7 +3082,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { kind_bounds_to_contents( cx, tp_def.bounds.builtin_bounds, - tp_def.bounds.trait_bounds.as_slice()) + tp_def.bounds.trait_bounds[]) } ty_infer(_) => { @@ -3291,7 +3278,7 @@ pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool { ty_uint(_) | ty_float(_) | ty_str | - ty_bare_fn(_) | + ty_bare_fn(..) 
| ty_closure(_) | ty_infer(_) | ty_err | @@ -3587,6 +3574,13 @@ pub fn type_is_bare_fn(ty: Ty) -> bool { } } +pub fn type_is_bare_fn_item(ty: Ty) -> bool { + match ty.sty { + ty_bare_fn(Some(_), _) => true, + _ => false + } +} + pub fn type_is_fp(ty: Ty) -> bool { match ty.sty { ty_infer(FloatVar(_)) | ty_float(_) => true, @@ -3685,7 +3679,7 @@ pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { match ty.sty { ty_open(ty) => mk_rptr(cx, ReStatic, mt {ty: ty, mutbl:ast::MutImmutable}), _ => cx.sess.bug(format!("Trying to close a non-open type {}", - ty_to_string(cx, ty)).as_slice()) + ty_to_string(cx, ty))[]) } } @@ -3786,7 +3780,7 @@ pub fn node_id_to_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) Some(ty) => ty.clone(), None => cx.sess.bug( format!("node_id_to_trait_ref: no trait ref for node `{}`", - cx.map.node_to_string(id)).as_slice()) + cx.map.node_to_string(id))[]) } } @@ -3799,7 +3793,7 @@ pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> { Some(ty) => ty, None => cx.sess.bug( format!("node_id_to_type: no type for node `{}`", - cx.map.node_to_string(id)).as_slice()) + cx.map.node_to_string(id))[]) } } @@ -3819,7 +3813,7 @@ pub fn node_id_item_substs<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> ItemSubsts pub fn fn_is_variadic(fty: Ty) -> bool { match fty.sty { - ty_bare_fn(ref f) => f.sig.0.variadic, + ty_bare_fn(_, ref f) => f.sig.0.variadic, ty_closure(ref f) => f.sig.0.variadic, ref s => { panic!("fn_is_variadic() called on non-fn type: {}", s) @@ -3829,7 +3823,7 @@ pub fn fn_is_variadic(fty: Ty) -> bool { pub fn ty_fn_sig<'tcx>(fty: Ty<'tcx>) -> &'tcx PolyFnSig<'tcx> { match fty.sty { - ty_bare_fn(ref f) => &f.sig, + ty_bare_fn(_, ref f) => &f.sig, ty_closure(ref f) => &f.sig, ref s => { panic!("ty_fn_sig() called on non-fn type: {}", s) @@ -3840,7 +3834,7 @@ pub fn ty_fn_sig<'tcx>(fty: Ty<'tcx>) -> &'tcx PolyFnSig<'tcx> { /// Returns the ABI of the given function. pub fn ty_fn_abi(fty: Ty) -> abi::Abi { match fty.sty { - ty_bare_fn(ref f) => f.abi, + ty_bare_fn(_, ref f) => f.abi, ty_closure(ref f) => f.abi, _ => panic!("ty_fn_abi() called on non-fn type"), } @@ -3867,7 +3861,7 @@ pub fn ty_closure_store(fty: Ty) -> TraitStore { pub fn ty_fn_ret<'tcx>(fty: Ty<'tcx>) -> FnOutput<'tcx> { match fty.sty { - ty_bare_fn(ref f) => f.sig.0.output, + ty_bare_fn(_, ref f) => f.sig.0.output, ty_closure(ref f) => f.sig.0.output, ref s => { panic!("ty_fn_ret() called on non-fn type: {}", s) @@ -3877,7 +3871,7 @@ pub fn ty_fn_ret<'tcx>(fty: Ty<'tcx>) -> FnOutput<'tcx> { pub fn is_fn_ty(fty: Ty) -> bool { match fty.sty { - ty_bare_fn(_) => true, + ty_bare_fn(..) 
=> true, ty_closure(_) => true, _ => false } @@ -3892,7 +3886,7 @@ pub fn ty_region(tcx: &ctxt, tcx.sess.span_bug( span, format!("ty_region() invoked on an inappropriate ty: {}", - s).as_slice()); + s)[]); } } } @@ -3953,11 +3947,11 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span { Some(f) => { cx.sess.bug(format!("Node id {} is not an expr: {}", id, - f).as_slice()); + f)[]); } None => { cx.sess.bug(format!("Node id {} is not present \ - in the node map", id).as_slice()); + in the node map", id)[]); } } } @@ -3973,14 +3967,14 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString { cx.sess.bug( format!("Variable id {} maps to {}, not local", id, - pat).as_slice()); + pat)[]); } } } r => { cx.sess.bug(format!("Variable id {} maps to {}, not local", id, - r).as_slice()); + r)[]); } } } @@ -4002,9 +3996,9 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, return match adjustment { Some(adjustment) => { match *adjustment { - AdjustAddEnv(store) => { + AdjustAddEnv(_, store) => { match unadjusted_ty.sty { - ty::ty_bare_fn(ref b) => { + ty::ty_bare_fn(Some(_), ref b) => { let bounds = ty::ExistentialBounds { region_bound: ReStatic, builtin_bounds: all_builtin_bounds(), @@ -4021,13 +4015,27 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, } ref b => { cx.sess.bug( - format!("add_env adjustment on non-bare-fn: \ + format!("add_env adjustment on non-fn-item: \ {}", b).as_slice()); } } } + AdjustReifyFnPointer(_) => { + match unadjusted_ty.sty { + ty::ty_bare_fn(Some(_), ref b) => { + ty::mk_bare_fn(cx, None, (*b).clone()) + } + ref b => { + cx.sess.bug( + format!("AdjustReifyFnPointer adjustment on non-fn-item: \ + {}", + b)[]); + } + } + } + AdjustDerefRef(ref adj) => { let mut adjusted_ty = unadjusted_ty; @@ -4051,7 +4059,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, {}", i, ty_to_string(cx, adjusted_ty)) - .as_slice()); + []); } } } @@ -4114,7 +4122,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, } _ => cx.sess.span_bug(span, format!("UnsizeLength with bad sty: {}", - ty_to_string(cx, ty)).as_slice()) + ty_to_string(cx, ty))[]) }, &UnsizeStruct(box ref k, tp_index) => match ty.sty { ty_struct(did, ref substs) => { @@ -4126,7 +4134,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, } _ => cx.sess.span_bug(span, format!("UnsizeStruct with bad sty: {}", - ty_to_string(cx, ty)).as_slice()) + ty_to_string(cx, ty))[]) }, &UnsizeVtable(TyTrait { ref principal, bounds }, _) => { mk_trait(cx, (*principal).clone(), bounds) @@ -4139,7 +4147,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def { Some(&def) => def, None => { tcx.sess.span_bug(expr.span, format!( - "no def-map entry for expr {}", expr.id).as_slice()); + "no def-map entry for expr {}", expr.id)[]); } } } @@ -4233,7 +4241,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { expr.span, format!("uncategorized def for expr {}: {}", expr.id, - def).as_slice()); + def)[]); } } } @@ -4358,7 +4366,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) token::get_name(name), fields.iter() .map(|f| token::get_name(f.name).get().to_string()) - .collect::>()).as_slice()); + .collect::>())[]); } pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem]) @@ -4380,7 +4388,8 @@ pub fn ty_sort_string<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> String { ty_vec(_, None) => "slice".to_string(), ty_ptr(_) => "*-ptr".to_string(), ty_rptr(_, _) => "&-ptr".to_string(), - ty_bare_fn(_) => "extern fn".to_string(), + ty_bare_fn(Some(_), _) => format!("fn item"), + ty_bare_fn(None, _) => "fn 
pointer".to_string(), ty_closure(_) => "fn".to_string(), ty_trait(ref inner) => { format!("trait {}", item_path_str(cx, inner.principal.def_id())) @@ -4571,6 +4580,10 @@ pub fn note_and_explain_type_err(cx: &ctxt, err: &type_err) { "concrete lifetime that was found is ", conc_region, ""); } + terr_regions_overly_polymorphic(_, ty::ReInfer(ty::ReVar(_))) => { + // don't bother to print out the message below for + // inference variables, it's not very illuminating. + } terr_regions_overly_polymorphic(_, conc_region) => { note_and_explain_region(cx, "expected concrete lifetime is ", @@ -4592,7 +4605,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) match item.node { ItemTrait(_, _, _, _, ref ms) => { let (_, p) = - ast_util::split_trait_methods(ms.as_slice()); + ast_util::split_trait_methods(ms[]); p.iter() .map(|m| { match impl_or_trait_item( @@ -4611,14 +4624,14 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is \ not a trait", - id).as_slice()) + id)[]) } } } _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is not a \ trait", - id).as_slice()) + id)[]) } } } else { @@ -4854,7 +4867,7 @@ impl<'tcx> VariantInfo<'tcx> { }, ast::StructVariantKind(ref struct_def) => { - let fields: &[StructField] = struct_def.fields.as_slice(); + let fields: &[StructField] = struct_def.fields[]; assert!(fields.len() > 0); @@ -5005,7 +5018,7 @@ pub fn enum_variants<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) cx.sess .span_err(e.span, format!("expected constant: {}", - *err).as_slice()); + *err)[]); } }, None => {} @@ -5285,7 +5298,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec { _ => { cx.sess.bug( format!("ID not mapped to struct fields: {}", - cx.map.node_to_string(did.node)).as_slice()); + cx.map.node_to_string(did.node))[]); } } } else { @@ -5318,7 +5331,7 @@ pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tc pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec> { v.iter().enumerate().map(|(i, &f)| { field { - name: token::intern(i.to_string().as_slice()), + name: token::intern(i.to_string()[]), mt: mt { ty: f, mutbl: MutImmutable @@ -5497,7 +5510,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { }; tcx.sess.span_err(count_expr.span, format!( "expected positive integer for repeat count, found {}", - found).as_slice()); + found)[]); } Err(_) => { let found = match count_expr.node { @@ -5512,7 +5525,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { }; tcx.sess.span_err(count_expr.span, format!( "expected constant integer for repeat count, found {}", - found).as_slice()); + found)[]); } } 0 @@ -5911,8 +5924,9 @@ pub fn hash_crate_independent<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>, svh: &Svh) - region(state, r); mt(state, m); } - ty_bare_fn(ref b) => { + ty_bare_fn(opt_def_id, ref b) => { byte!(14); + hash!(opt_def_id); hash!(b.unsafety); hash!(b.abi); fn_sig(state, &b.sig); @@ -6227,7 +6241,7 @@ pub fn accumulate_lifetimes_in_type(accumulator: &mut Vec, ty_str | ty_vec(_, _) | ty_ptr(_) | - ty_bare_fn(_) | + ty_bare_fn(..) | ty_tup(_) | ty_param(_) | ty_infer(_) | @@ -6250,7 +6264,7 @@ pub fn accumulate_lifetimes_in_type(accumulator: &mut Vec, } /// A free variable referred to in a function. -#[deriving(Copy, Encodable, Decodable)] +#[deriving(Copy, RustcEncodable, RustcDecodable)] pub struct Freevar { /// The variable being accessed free. 
pub def: def::Def, @@ -6263,12 +6277,15 @@ pub type FreevarMap = NodeMap>; pub type CaptureModeMap = NodeMap; +// Trait method resolution +pub type TraitMap = NodeMap>; + pub fn with_freevars(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where F: FnOnce(&[Freevar]) -> T, { match tcx.freevars.borrow().get(&fid) { None => f(&[]), - Some(d) => f(d.as_slice()) + Some(d) => f(d[]) } } @@ -6276,6 +6293,7 @@ impl<'tcx> AutoAdjustment<'tcx> { pub fn is_identity(&self) -> bool { match *self { AdjustAddEnv(..) => false, + AdjustReifyFnPointer(..) => false, AdjustDerefRef(ref r) => r.is_identity(), } } @@ -6391,8 +6409,11 @@ impl DebruijnIndex { impl<'tcx> Repr<'tcx> for AutoAdjustment<'tcx> { fn repr(&self, tcx: &ctxt<'tcx>) -> String { match *self { - AdjustAddEnv(ref trait_store) => { - format!("AdjustAddEnv({})", trait_store) + AdjustAddEnv(def_id, ref trait_store) => { + format!("AdjustAddEnv({},{})", def_id.repr(tcx), trait_store) + } + AdjustReifyFnPointer(def_id) => { + format!("AdjustAddEnv({})", def_id.repr(tcx)) } AdjustDerefRef(ref data) => { data.repr(tcx) diff --git a/src/librustc/middle/ty_fold.rs b/src/librustc/middle/ty_fold.rs index 71e42a9dbb3de..a35ea30b21791 100644 --- a/src/librustc/middle/ty_fold.rs +++ b/src/librustc/middle/ty_fold.rs @@ -82,10 +82,6 @@ pub trait TypeFolder<'tcx> { super_fold_trait_ref(self, t) } - fn fold_sty(&mut self, sty: &ty::sty<'tcx>) -> ty::sty<'tcx> { - super_fold_sty(self, sty) - } - fn fold_substs(&mut self, substs: &subst::Substs<'tcx>) -> subst::Substs<'tcx> { @@ -260,12 +256,6 @@ impl<'tcx> TypeFoldable<'tcx> for ty::FnSig<'tcx> { } } -impl<'tcx> TypeFoldable<'tcx> for ty::sty<'tcx> { - fn fold_with>(&self, folder: &mut F) -> ty::sty<'tcx> { - folder.fold_sty(self) - } -} - impl<'tcx> TypeFoldable<'tcx> for ty::TraitRef<'tcx> { fn fold_with>(&self, folder: &mut F) -> ty::TraitRef<'tcx> { folder.fold_trait_ref(self) @@ -521,9 +511,55 @@ impl<'tcx,T,U> TypeFoldable<'tcx> for ty::OutlivesPredicate // They should invoke `foo.fold_with()` to do recursive folding. pub fn super_fold_ty<'tcx, T: TypeFolder<'tcx>>(this: &mut T, - t: Ty<'tcx>) + ty: Ty<'tcx>) -> Ty<'tcx> { - let sty = t.sty.fold_with(this); + let sty = match ty.sty { + ty::ty_uniq(typ) => { + ty::ty_uniq(typ.fold_with(this)) + } + ty::ty_ptr(ref tm) => { + ty::ty_ptr(tm.fold_with(this)) + } + ty::ty_vec(typ, sz) => { + ty::ty_vec(typ.fold_with(this), sz) + } + ty::ty_open(typ) => { + ty::ty_open(typ.fold_with(this)) + } + ty::ty_enum(tid, ref substs) => { + ty::ty_enum(tid, substs.fold_with(this)) + } + ty::ty_trait(box ty::TyTrait { ref principal, bounds }) => { + ty::ty_trait(box ty::TyTrait { + principal: (*principal).fold_with(this), + bounds: bounds.fold_with(this), + }) + } + ty::ty_tup(ref ts) => { + ty::ty_tup(ts.fold_with(this)) + } + ty::ty_bare_fn(opt_def_id, ref f) => { + ty::ty_bare_fn(opt_def_id, f.fold_with(this)) + } + ty::ty_closure(ref f) => { + ty::ty_closure(box f.fold_with(this)) + } + ty::ty_rptr(r, ref tm) => { + ty::ty_rptr(r.fold_with(this), tm.fold_with(this)) + } + ty::ty_struct(did, ref substs) => { + ty::ty_struct(did, substs.fold_with(this)) + } + ty::ty_unboxed_closure(did, ref region, ref substs) => { + ty::ty_unboxed_closure(did, region.fold_with(this), substs.fold_with(this)) + } + ty::ty_bool | ty::ty_char | ty::ty_str | + ty::ty_int(_) | ty::ty_uint(_) | ty::ty_float(_) | + ty::ty_err | ty::ty_infer(_) | + ty::ty_param(..) 
=> { + ty.sty.clone() + } + }; ty::mk_t(this.tcx(), sty) } @@ -601,58 +637,6 @@ pub fn super_fold_mt<'tcx, T: TypeFolder<'tcx>>(this: &mut T, mutbl: mt.mutbl} } -pub fn super_fold_sty<'tcx, T: TypeFolder<'tcx>>(this: &mut T, - sty: &ty::sty<'tcx>) - -> ty::sty<'tcx> { - match *sty { - ty::ty_uniq(typ) => { - ty::ty_uniq(typ.fold_with(this)) - } - ty::ty_ptr(ref tm) => { - ty::ty_ptr(tm.fold_with(this)) - } - ty::ty_vec(typ, sz) => { - ty::ty_vec(typ.fold_with(this), sz) - } - ty::ty_open(typ) => { - ty::ty_open(typ.fold_with(this)) - } - ty::ty_enum(tid, ref substs) => { - ty::ty_enum(tid, substs.fold_with(this)) - } - ty::ty_trait(box ty::TyTrait { ref principal, bounds }) => { - ty::ty_trait(box ty::TyTrait { - principal: (*principal).fold_with(this), - bounds: bounds.fold_with(this), - }) - } - ty::ty_tup(ref ts) => { - ty::ty_tup(ts.fold_with(this)) - } - ty::ty_bare_fn(ref f) => { - ty::ty_bare_fn(f.fold_with(this)) - } - ty::ty_closure(ref f) => { - ty::ty_closure(box f.fold_with(this)) - } - ty::ty_rptr(r, ref tm) => { - ty::ty_rptr(r.fold_with(this), tm.fold_with(this)) - } - ty::ty_struct(did, ref substs) => { - ty::ty_struct(did, substs.fold_with(this)) - } - ty::ty_unboxed_closure(did, ref region, ref substs) => { - ty::ty_unboxed_closure(did, region.fold_with(this), substs.fold_with(this)) - } - ty::ty_bool | ty::ty_char | ty::ty_str | - ty::ty_int(_) | ty::ty_uint(_) | ty::ty_float(_) | - ty::ty_err | ty::ty_infer(_) | - ty::ty_param(..) => { - (*sty).clone() - } - } -} - pub fn super_fold_trait_store<'tcx, T: TypeFolder<'tcx>>(this: &mut T, trait_store: ty::TraitStore) -> ty::TraitStore { diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs index 5c2fe0854ee77..a2e334543206c 100644 --- a/src/librustc/plugin/load.rs +++ b/src/librustc/plugin/load.rs @@ -141,17 +141,17 @@ impl<'a> PluginLoader<'a> { // this is fatal: there are almost certainly macros we need // inside this crate, so continue would spew "macro undefined" // errors - Err(err) => self.sess.span_fatal(vi.span, err.as_slice()) + Err(err) => self.sess.span_fatal(vi.span, err[]) }; unsafe { let registrar = - match lib.symbol(symbol.as_slice()) { + match lib.symbol(symbol[]) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros - Err(err) => self.sess.span_fatal(vi.span, err.as_slice()) + Err(err) => self.sess.span_fatal(vi.span, err[]) }; self.plugins.registrars.push(registrar); diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 0c014d615caf5..6629f6620d484 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -18,7 +18,7 @@ pub use self::OptLevel::*; pub use self::OutputType::*; pub use self::DebugInfoLevel::*; -use session::{early_error, early_warn, Session}; +use session::{early_error, Session}; use rustc_back::target::Target; use lint; @@ -33,8 +33,7 @@ use syntax::parse; use syntax::parse::token::InternedString; use std::collections::HashMap; -use std::collections::hash_map::{Occupied, Vacant}; -use getopts::{optopt, optmulti, optflag, optflagopt}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use getopts; use std::cell::{RefCell}; use std::fmt; @@ -69,6 +68,7 @@ pub enum OutputType { OutputTypeLlvmAssembly, OutputTypeObject, OutputTypeExe, + OutputTypeDepInfo, } #[deriving(Clone)] @@ -102,8 +102,7 @@ pub struct Options { pub debugging_opts: u64, /// Whether to write dependency files. It's (enabled, optional filename). 
pub write_dependency_info: (bool, Option), - /// Crate id-related things to maybe print. It's (crate_name, crate_file_name). - pub print_metas: (bool, bool), + pub prints: Vec, pub cg: CodegenOptions, pub color: ColorConfig, pub externs: HashMap>, @@ -114,6 +113,14 @@ pub struct Options { pub alt_std_name: Option } +#[deriving(Clone, PartialEq, Eq)] +#[allow(missing_copy_implementations)] +pub enum PrintRequest { + FileNames, + Sysroot, + CrateName, +} + pub enum Input { /// Load source from file File(Path), @@ -154,6 +161,7 @@ impl OutputFilenames { OutputTypeAssembly => base.with_extension("s"), OutputTypeLlvmAssembly => base.with_extension("ll"), OutputTypeObject => base.with_extension("o"), + OutputTypeDepInfo => base.with_extension("d"), OutputTypeExe => base, } } @@ -200,7 +208,7 @@ pub fn basic_options() -> Options { no_analysis: false, debugging_opts: 0, write_dependency_info: (false, None), - print_metas: (false, false), + prints: Vec::new(), cg: basic_codegen_options(), color: Auto, externs: HashMap::new(), @@ -266,8 +274,11 @@ debugging_opts! { FLOWGRAPH_PRINT_MOVES, FLOWGRAPH_PRINT_ASSIGNS, FLOWGRAPH_PRINT_ALL, - PRINT_SYSROOT, - PRINT_REGION_GRAPH + PRINT_REGION_GRAPH, + PARSE_ONLY, + NO_TRANS, + NO_ANALYSIS, + UNSTABLE_OPTIONS ] 0 } @@ -312,11 +323,15 @@ pub fn debugging_opts_map() -> Vec<(&'static str, &'static str, u64)> { --pretty flowgraph output", FLOWGRAPH_PRINT_ASSIGNS), ("flowgraph-print-all", "Include all dataflow analysis data in \ --pretty flowgraph output", FLOWGRAPH_PRINT_ALL), - ("print-sysroot", "Print the sysroot as used by this rustc invocation", - PRINT_SYSROOT), ("print-region-graph", "Prints region inference graph. \ Use with RUST_REGION_GRAPH=help for more info", - PRINT_REGION_GRAPH)] + PRINT_REGION_GRAPH), + ("parse-only", "Parse only; do not compile, assemble, or link", PARSE_ONLY), + ("no-trans", "Run all passes except translation; no output", NO_TRANS), + ("no-analysis", "Parse and expand the source, but run no analysis and", + NO_TRANS), + ("unstable-options", "Adds unstable command line options to rustc interface", + UNSTABLE_OPTIONS)] } #[deriving(Clone)] @@ -370,6 +385,8 @@ macro_rules! cgoptions { pub const parse_uint: Option<&'static str> = Some("a number"); pub const parse_passes: Option<&'static str> = Some("a space-separated list of passes, or `all`"); + pub const parse_opt_uint: Option<&'static str> = + Some("a number"); } mod cgsetters { @@ -441,6 +458,13 @@ macro_rules! cgoptions { } } + fn parse_opt_uint(slot: &mut Option, v: Option<&str>) -> bool { + match v { + Some(s) => { *slot = from_str(s); slot.is_some() } + None => { *slot = None; true } + } + } + fn parse_passes(slot: &mut Passes, v: Option<&str>) -> bool { match v { Some("all") => { @@ -510,6 +534,11 @@ cgoptions! 
{ "print remarks for these optimization passes (space separated, or \"all\")"), no_stack_check: bool = (false, parse_bool, "disable checks for stack exhaustion (a memory-safety hazard!)"), + debuginfo: Option = (None, parse_opt_uint, + "debug info emission level, 0 = no debug info, 1 = line tables only, \ + 2 = full debug info with variable and type information"), + opt_level: Option = (None, parse_opt_uint, + "Optimize with possible levels 0-3"), } pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions @@ -527,17 +556,17 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions match (value, opt_type_desc) { (Some(..), None) => { early_error(format!("codegen option `{}` takes no \ - value", key).as_slice()) + value", key)[]) } (None, Some(type_desc)) => { early_error(format!("codegen option `{0}` requires \ {1} (-C {0}=)", - key, type_desc).as_slice()) + key, type_desc)[]) } (Some(value), Some(type_desc)) => { early_error(format!("incorrect value `{}` for codegen \ option `{}` - {} was expected", - value, key, type_desc).as_slice()) + value, key, type_desc)[]) } (None, None) => unreachable!() } @@ -547,7 +576,7 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions } if !found { early_error(format!("unknown codegen option: `{}`", - key).as_slice()); + key)[]); } } return cg; @@ -560,10 +589,10 @@ pub fn default_lib_output() -> CrateType { pub fn default_configuration(sess: &Session) -> ast::CrateConfig { use syntax::parse::token::intern_and_get_ident as intern; - let end = sess.target.target.target_endian.as_slice(); - let arch = sess.target.target.arch.as_slice(); - let wordsz = sess.target.target.target_word_size.as_slice(); - let os = sess.target.target.target_os.as_slice(); + let end = sess.target.target.target_endian[]; + let arch = sess.target.target.arch[]; + let wordsz = sess.target.target.target_word_size[]; + let os = sess.target.target.target_os[]; let fam = match sess.target.target.options.is_like_windows { true => InternedString::new("windows"), @@ -599,23 +628,23 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig { append_configuration(&mut user_cfg, InternedString::new("test")) } let mut v = user_cfg.into_iter().collect::>(); - v.push_all(default_cfg.as_slice()); + v.push_all(default_cfg[]); v } pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config { - let target = match Target::search(opts.target_triple.as_slice()) { + let target = match Target::search(opts.target_triple[]) { Ok(t) => t, Err(e) => { - sp.handler().fatal((format!("Error loading target specification: {}", e)).as_slice()); + sp.handler().fatal((format!("Error loading target specification: {}", e))[]); } }; - let (int_type, uint_type) = match target.target_word_size.as_slice() { + let (int_type, uint_type) = match target.target_word_size[] { "32" => (ast::TyI32, ast::TyU32), "64" => (ast::TyI64, ast::TyU64), w => sp.handler().fatal((format!("target specification was invalid: unrecognized \ - target-word-size {}", w)).as_slice()) + target-word-size {}", w))[]) }; Config { @@ -625,74 +654,179 @@ pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config { } } -// rustc command line options +/// Returns the "short" subset of the stable rustc command line options. +pub fn short_optgroups() -> Vec { + rustc_short_optgroups().into_iter() + .filter(|g|g.is_stable()) + .map(|g|g.opt_group) + .collect() +} + +/// Returns all of the stable rustc command line options. 
pub fn optgroups() -> Vec { - vec!( - optflag("h", "help", "Display this message"), - optmulti("", "cfg", "Configure the compilation environment", "SPEC"), - optmulti("L", "", "Add a directory to the library search path", "PATH"), - optmulti("l", "", "Link the generated crate(s) to the specified native + rustc_optgroups().into_iter() + .filter(|g|g.is_stable()) + .map(|g|g.opt_group) + .collect() +} + +#[deriving(Copy, Clone, PartialEq, Eq, Show)] +pub enum OptionStability { Stable, Unstable } + +#[deriving(Clone, PartialEq, Eq)] +pub struct RustcOptGroup { + pub opt_group: getopts::OptGroup, + pub stability: OptionStability, +} + +impl RustcOptGroup { + pub fn is_stable(&self) -> bool { + self.stability == OptionStability::Stable + } + + fn stable(g: getopts::OptGroup) -> RustcOptGroup { + RustcOptGroup { opt_group: g, stability: OptionStability::Stable } + } + + fn unstable(g: getopts::OptGroup) -> RustcOptGroup { + RustcOptGroup { opt_group: g, stability: OptionStability::Unstable } + } +} + +// The `opt` local module holds wrappers around the `getopts` API that +// adds extra rustc-specific metadata to each option; such metadata +// is exposed by . The public +// functions below ending with `_u` are the functions that return +// *unstable* options, i.e. options that are only enabled when the +// user also passes the `-Z unstable-options` debugging flag. +mod opt { + // The `fn opt_u` etc below are written so that we can use them + // in the future; do not warn about them not being used right now. + #![allow(dead_code)] + + use getopts; + use super::RustcOptGroup; + + type R = RustcOptGroup; + type S<'a> = &'a str; + + fn stable(g: getopts::OptGroup) -> R { RustcOptGroup::stable(g) } + fn unstable(g: getopts::OptGroup) -> R { RustcOptGroup::unstable(g) } + + // FIXME (pnkfelix): We default to stable since the current set of + // options is defacto stable. However, it would be good to revise the + // code so that a stable option is the thing that takes extra effort + // to encode. + + pub fn opt(a: S, b: S, c: S, d: S) -> R { stable(getopts::optopt(a, b, c, d)) } + pub fn multi(a: S, b: S, c: S, d: S) -> R { stable(getopts::optmulti(a, b, c, d)) } + pub fn flag(a: S, b: S, c: S) -> R { stable(getopts::optflag(a, b, c)) } + pub fn flagopt(a: S, b: S, c: S, d: S) -> R { stable(getopts::optflagopt(a, b, c, d)) } + + pub fn opt_u(a: S, b: S, c: S, d: S) -> R { unstable(getopts::optopt(a, b, c, d)) } + pub fn multi_u(a: S, b: S, c: S, d: S) -> R { unstable(getopts::optmulti(a, b, c, d)) } + pub fn flag_u(a: S, b: S, c: S) -> R { unstable(getopts::optflag(a, b, c)) } + pub fn flagopt_u(a: S, b: S, c: S, d: S) -> R { unstable(getopts::optflagopt(a, b, c, d)) } +} + +/// Returns the "short" subset of the rustc command line options, +/// including metadata for each option, such as whether the option is +/// part of the stable long-term interface for rustc. +pub fn rustc_short_optgroups() -> Vec { + vec![ + opt::flag("h", "help", "Display this message"), + opt::multi("", "cfg", "Configure the compilation environment", "SPEC"), + opt::multi("L", "", "Add a directory to the library search path", "PATH"), + opt::multi("l", "", "Link the generated crate(s) to the specified native library NAME. The optional KIND can be one of, static, dylib, or framework. 
If omitted, dylib is assumed.", "NAME[:KIND]"), - optmulti("", "crate-type", "Comma separated list of types of crates + opt::multi("", "crate-type", "Comma separated list of types of crates for the compiler to emit", - "[bin|lib|rlib|dylib|staticlib]"), - optmulti("", "emit", "Comma separated list of types of output for the compiler to emit", - "[asm|bc|ir|obj|link]"), - optopt("", "crate-name", "Specify the name of the crate being built", + "[bin|lib|rlib|dylib|staticlib]"), + opt::opt("", "crate-name", "Specify the name of the crate being built", "NAME"), - optflag("", "print-crate-name", "Output the crate name and exit"), - optflag("", "print-file-name", "Output the file(s) that would be written if compilation \ - continued and exit"), - optflag("", "crate-file-name", "deprecated in favor of --print-file-name"), - optflag("g", "", "Equivalent to --debuginfo=2"), - optopt("", "debuginfo", "Emit DWARF debug info to the objects created: + opt::multi("", "emit", "Comma separated list of types of output for \ + the compiler to emit", + "[asm|llvm-bc|llvm-ir|obj|link|dep-info]"), + opt::multi("", "print", "Comma separated list of compiler information to \ + print on stdout", + "[crate-name|output-file-names|sysroot]"), + opt::flag("g", "", "Equivalent to -C debuginfo=2"), + opt::flag("O", "", "Equivalent to -C opt-level=2"), + opt::opt("o", "", "Write output to ", "FILENAME"), + opt::opt("", "out-dir", "Write output to compiler-chosen filename \ + in ", "DIR"), + opt::opt("", "explain", "Provide a detailed explanation of an error \ + message", "OPT"), + opt::flag("", "test", "Build a test harness"), + opt::opt("", "target", "Target triple cpu-manufacturer-kernel[-os] \ + to compile for (see chapter 3.4 of \ + http://www.sourceware.org/autobook/ + for details)", + "TRIPLE"), + opt::multi("W", "warn", "Set lint warnings", "OPT"), + opt::multi("A", "allow", "Set lint allowed", "OPT"), + opt::multi("D", "deny", "Set lint denied", "OPT"), + opt::multi("F", "forbid", "Set lint forbidden", "OPT"), + opt::multi("C", "codegen", "Set a codegen option", "OPT[=VALUE]"), + opt::flag("V", "version", "Print version info and exit"), + opt::flag("v", "verbose", "Use verbose output"), + ] +} + +/// Returns all rustc command line options, including metadata for +/// each option, such as whether the option is part of the stable +/// long-term interface for rustc. 
+pub fn rustc_optgroups() -> Vec { + let mut opts = rustc_short_optgroups(); + opts.push_all(&[ + opt::multi("", "extern", "Specify where an external rust library is \ + located", + "NAME=PATH"), + opt::opt("", "opt-level", "Optimize with possible levels 0-3", "LEVEL"), + opt::opt("", "sysroot", "Override the system root", "PATH"), + opt::multi("Z", "", "Set internal debugging options", "FLAG"), + opt::opt("", "color", "Configure coloring of output: + auto = colorize, if output goes to a tty (default); + always = always colorize output; + never = never colorize output", "auto|always|never"), + + // DEPRECATED + opt::flag("", "print-crate-name", "Output the crate name and exit"), + opt::flag("", "print-file-name", "Output the file(s) that would be \ + written if compilation \ + continued and exit"), + opt::opt("", "debuginfo", "Emit DWARF debug info to the objects created: 0 = no debug info, 1 = line-tables only (for stacktraces and breakpoints), - 2 = full debug info with variable and type information (same as -g)", "LEVEL"), - optflag("", "no-trans", "Run all passes except translation; no output"), - optflag("", "no-analysis", - "Parse and expand the source, but run no analysis and produce no output"), - optflag("O", "", "Equivalent to --opt-level=2"), - optopt("o", "", "Write output to ", "FILENAME"), - optopt("", "opt-level", "Optimize with possible levels 0-3", "LEVEL"), - optopt( "", "out-dir", "Write output to compiler-chosen filename in ", "DIR"), - optflag("", "parse-only", "Parse only; do not compile, assemble, or link"), - optopt("", "explain", "Provide a detailed explanation of an error message", "OPT"), - optflagopt("", "pretty", + 2 = full debug info with variable and type information \ + (same as -g)", "LEVEL"), + opt::flag("", "no-trans", "Run all passes except translation; no output"), + opt::flag("", "no-analysis", "Parse and expand the source, but run no \ + analysis and produce no output"), + opt::flag("", "parse-only", "Parse only; do not compile, assemble, \ + or link"), + opt::flagopt("", "pretty", "Pretty-print the input instead of compiling; valid types are: `normal` (un-annotated source), `expanded` (crates expanded), - `typed` (crates expanded, with type annotations), - `expanded,identified` (fully parenthesized, AST nodes with IDs), or - `flowgraph=` (graphviz formatted flowgraph for node)", + `typed` (crates expanded, with type annotations), or + `expanded,identified` (fully parenthesized, AST nodes with IDs).", "TYPE"), - optflagopt("", "dep-info", + opt::flagopt_u("", "xpretty", + "Pretty-print the input instead of compiling, unstable variants; + valid types are any of the types for `--pretty`, as well as: + `flowgraph=` (graphviz formatted flowgraph for node), or + `everybody_loops` (all function bodies replaced with `loop {}`).", + "TYPE"), + opt::flagopt("", "dep-info", "Output dependency info to after compiling, \ in a format suitable for use by Makefiles", "FILENAME"), - optopt("", "sysroot", "Override the system root", "PATH"), - optflag("", "test", "Build a test harness"), - optopt("", "target", "Target triple cpu-manufacturer-kernel[-os] - to compile for (see chapter 3.4 of http://www.sourceware.org/autobook/ - for details)", "TRIPLE"), - optmulti("W", "warn", "Set lint warnings", "OPT"), - optmulti("A", "allow", "Set lint allowed", "OPT"), - optmulti("D", "deny", "Set lint denied", "OPT"), - optmulti("F", "forbid", "Set lint forbidden", "OPT"), - optmulti("C", "codegen", "Set a codegen option", "OPT[=VALUE]"), - optmulti("Z", "", "Set internal 
debugging options", "FLAG"), - optflagopt("v", "version", "Print version info and exit", "verbose"), - optopt("", "color", "Configure coloring of output: - auto = colorize, if output goes to a tty (default); - always = always colorize output; - never = never colorize output", "auto|always|never"), - optmulti("", "extern", "Specify where an external rust library is located", - "NAME=PATH"), - ) + ]); + opts } - // Convert strings provided as --cfg [cfgspec] into a crate_cfg pub fn parse_cfgspecs(cfgspecs: Vec ) -> ast::CrateConfig { cfgspecs.into_iter().map(|s| { @@ -707,11 +841,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let unparsed_crate_types = matches.opt_strs("crate-type"); let crate_types = parse_crate_types_from_list(unparsed_crate_types) - .unwrap_or_else(|e| early_error(e.as_slice())); - - let parse_only = matches.opt_present("parse-only"); - let no_trans = matches.opt_present("no-trans"); - let no_analysis = matches.opt_present("no-analysis"); + .unwrap_or_else(|e| early_error(e[])); let mut lint_opts = vec!(); let mut describe_lints = false; @@ -721,7 +851,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { if lint_name == "help" { describe_lints = true; } else { - lint_opts.push((lint_name.replace("-", "_").into_string(), level)); + lint_opts.push((lint_name.replace("-", "_"), level)); } } } @@ -739,11 +869,33 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } if this_bit == 0 { early_error(format!("unknown debug flag: {}", - *debug_flag).as_slice()) + *debug_flag)[]) } debugging_opts |= this_bit; } + let parse_only = if matches.opt_present("parse-only") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--parse-only is deprecated in favor of -Z parse-only"); + true + } else { + debugging_opts & PARSE_ONLY != 0 + }; + let no_trans = if matches.opt_present("no-trans") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--no-trans is deprecated in favor of -Z no-trans"); + true + } else { + debugging_opts & NO_TRANS != 0 + }; + let no_analysis = if matches.opt_present("no-analysis") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--no-analysis is deprecated in favor of -Z no-analysis"); + true + } else { + debugging_opts & NO_ANALYSIS != 0 + }; + if debugging_opts & DEBUG_LLVM != 0 { unsafe { llvm::LLVMSetDebug(1); } } @@ -754,14 +906,15 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { for unparsed_output_type in unparsed_output_types.iter() { for part in unparsed_output_type.split(',') { let output_type = match part.as_slice() { - "asm" => OutputTypeAssembly, - "ir" => OutputTypeLlvmAssembly, - "bc" => OutputTypeBitcode, - "obj" => OutputTypeObject, + "asm" => OutputTypeAssembly, + "llvm-ir" => OutputTypeLlvmAssembly, + "llvm-bc" => OutputTypeBitcode, + "obj" => OutputTypeObject, "link" => OutputTypeExe, + "dep-info" => OutputTypeDepInfo, _ => { early_error(format!("unknown emission type: `{}`", - part).as_slice()) + part)[]) } }; output_types.push(output_type) @@ -774,6 +927,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { output_types.push(OutputTypeExe); } + let cg = build_codegen_options(matches); + let sysroot_opt = matches.opt_str("sysroot").map(|m| Path::new(m)); let target = matches.opt_str("target").unwrap_or( host_triple().to_string()); @@ -782,8 +937,13 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { if matches.opt_present("opt-level") { early_error("-O and 
--opt-level both provided"); } + if cg.opt_level.is_some() { + early_error("-O and -C opt-level both provided"); + } Default } else if matches.opt_present("opt-level") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--opt-level=N is deprecated in favor of -C opt-level=N"); match matches.opt_str("opt-level").as_ref().map(|s| s.as_slice()) { None | Some("0") => No, @@ -793,11 +953,22 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("optimization level needs to be \ between 0-3 (instead was `{}`)", - arg).as_slice()); + arg)[]); } } } else { - No + match cg.opt_level { + None => No, + Some(0) => No, + Some(1) => Less, + Some(2) => Default, + Some(3) => Aggressive, + Some(arg) => { + early_error(format!("optimization level needs to be \ + between 0-3 (instead was `{}`)", + arg).as_slice()); + } + } } }; let gc = debugging_opts & GC != 0; @@ -805,8 +976,13 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { if matches.opt_present("debuginfo") { early_error("-g and --debuginfo both provided"); } + if cg.debuginfo.is_some() { + early_error("-g and -C debuginfo both provided"); + } FullDebugInfo } else if matches.opt_present("debuginfo") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--debuginfo=N is deprecated in favor of -C debuginfo=N"); match matches.opt_str("debuginfo").as_ref().map(|s| s.as_slice()) { Some("0") => NoDebugInfo, Some("1") => LimitedDebugInfo, @@ -815,15 +991,24 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("debug info level needs to be between \ 0-2 (instead was `{}`)", - arg).as_slice()); + arg)[]); } } } else { - NoDebugInfo + match cg.debuginfo { + None | Some(0) => NoDebugInfo, + Some(1) => LimitedDebugInfo, + Some(2) => FullDebugInfo, + Some(arg) => { + early_error(format!("debug info level needs to be between \ + 0-2 (instead was `{}`)", + arg).as_slice()); + } + } }; let addl_lib_search_paths = matches.opt_strs("L").iter().map(|s| { - Path::new(s.as_slice()) + Path::new(s[]) }).collect(); let libs = matches.opt_strs("l").into_iter().map(|s| { @@ -837,7 +1022,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { (_, s) => { early_error(format!("unknown library kind `{}`, expected \ one of dylib, framework, or static", - s).as_slice()); + s)[]); } }; (name.to_string(), kind) @@ -845,24 +1030,44 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let cfg = parse_cfgspecs(matches.opt_strs("cfg")); let test = matches.opt_present("test"); - let write_dependency_info = (matches.opt_present("dep-info"), - matches.opt_str("dep-info") - .map(|p| Path::new(p))); - - let print_metas = (matches.opt_present("print-crate-name"), - matches.opt_present("print-file-name") || - matches.opt_present("crate-file-name")); - if matches.opt_present("crate-file-name") { - early_warn("the --crate-file-name argument has been renamed to \ - --print-file-name"); + let write_dependency_info = if matches.opt_present("dep-info") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--dep-info has been deprecated in favor of --emit"); + (true, matches.opt_str("dep-info").map(|p| Path::new(p))) + } else { + (output_types.contains(&OutputTypeDepInfo), None) + }; + + let mut prints = matches.opt_strs("print").into_iter().map(|s| { + match s.as_slice() { + "crate-name" => PrintRequest::CrateName, + "file-names" => PrintRequest::FileNames, + "sysroot" => 
PrintRequest::Sysroot, + req => { + early_error(format!("unknown print request `{}`", req).as_slice()) + } + } + }).collect::>(); + if matches.opt_present("print-crate-name") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--print-crate-name has been deprecated in favor of \ + // --print crate-name"); + prints.push(PrintRequest::CrateName); + } + if matches.opt_present("print-file-name") { + // FIXME(acrichto) uncomment deprecation warning + // early_warn("--print-file-name has been deprecated in favor of \ + // --print file-names"); + prints.push(PrintRequest::FileNames); } - let cg = build_codegen_options(matches); if !cg.remark.is_empty() && debuginfo == NoDebugInfo { - early_warn("-C remark will not show source locations without --debuginfo"); + // FIXME(acrichto) uncomment deprecation warning + // early_warn("-C remark will not show source locations without \ + // --debuginfo"); } - let color = match matches.opt_str("color").as_ref().map(|s| s.as_slice()) { + let color = match matches.opt_str("color").as_ref().map(|s| s[]) { Some("auto") => Auto, Some("always") => Always, Some("never") => Never, @@ -872,7 +1077,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("argument for --color must be auto, always \ or never (instead was `{}`)", - arg).as_slice()) + arg)[]) } }; @@ -914,7 +1119,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { no_analysis: no_analysis, debugging_opts: debugging_opts, write_dependency_info: write_dependency_info, - print_metas: print_metas, + prints: prints, cg: cg, color: color, externs: externs, @@ -973,7 +1178,7 @@ mod test { #[test] fn test_switch_implies_cfg_test() { let matches = - &match getopts(&["--test".to_string()], optgroups().as_slice()) { + &match getopts(&["--test".to_string()], optgroups()[]) { Ok(m) => m, Err(f) => panic!("test_switch_implies_cfg_test: {}", f) }; @@ -981,7 +1186,7 @@ mod test { let sessopts = build_session_options(matches); let sess = build_session(sessopts, None, registry); let cfg = build_configuration(&sess); - assert!((attr::contains_name(cfg.as_slice(), "test"))); + assert!((attr::contains_name(cfg[], "test"))); } // When the user supplies --test and --cfg test, don't implicitly add @@ -990,7 +1195,7 @@ mod test { fn test_switch_implies_cfg_test_unless_cfg_test() { let matches = &match getopts(&["--test".to_string(), "--cfg=test".to_string()], - optgroups().as_slice()) { + optgroups()[]) { Ok(m) => m, Err(f) => { panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) @@ -1010,7 +1215,7 @@ mod test { { let matches = getopts(&[ "-Awarnings".to_string() - ], optgroups().as_slice()).unwrap(); + ], optgroups()[]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1021,7 +1226,7 @@ mod test { let matches = getopts(&[ "-Awarnings".to_string(), "-Dwarnings".to_string() - ], optgroups().as_slice()).unwrap(); + ], optgroups()[]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1031,7 +1236,7 @@ mod test { { let matches = getopts(&[ "-Adead_code".to_string() - ], optgroups().as_slice()).unwrap(); + ], optgroups()[]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); diff 
--git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 8516ece202c75..37bdd1673e9ca 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -172,7 +172,7 @@ impl Session { // cases later on pub fn impossible_case(&self, sp: Span, msg: &str) -> ! { self.span_bug(sp, - format!("impossible case reached: {}", msg).as_slice()); + format!("impossible case reached: {}", msg)[]); } pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) } pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) } @@ -211,7 +211,7 @@ impl Session { } pub fn target_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> { filesearch::FileSearch::new(self.sysroot(), - self.opts.target_triple.as_slice(), + self.opts.target_triple[], &self.opts.addl_lib_search_paths) } pub fn host_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> { diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index bc6fb1be0758f..e1448364a9e05 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -14,6 +14,7 @@ use std::cell::{RefCell, Cell}; use std::collections::HashMap; use std::fmt::Show; use std::hash::{Hash, Hasher}; +use std::iter::repeat; use std::time::Duration; use syntax::ast; @@ -48,7 +49,7 @@ pub fn time(do_it: bool, what: &str, u: U, f: F) -> T where }; let rv = rv.unwrap(); - println!("{}time: {}.{:03} \t{}", " ".repeat(old), + println!("{}time: {}.{:03} \t{}", repeat(" ").take(old).collect::(), dur.num_seconds(), dur.num_milliseconds() % 1000, what); DEPTH.with(|slot| slot.set(old)); diff --git a/src/librustc/util/lev_distance.rs b/src/librustc/util/lev_distance.rs new file mode 100644 index 0000000000000..24e9883744407 --- /dev/null +++ b/src/librustc/util/lev_distance.rs @@ -0,0 +1,63 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
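// Editor's note (not part of the patch): the new file below implements the
// classic Levenshtein edit distance over `char`s, presumably so that later
// passes can suggest near-miss names. A worked example of what it computes:
//
//     assert_eq!(lev_distance("kitten", "sitting"), 3); // k->s, e->i, insert g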
+
+use std::cmp;
+
+pub fn lev_distance(me: &str, t: &str) -> uint {
+    if me.is_empty() { return t.chars().count(); }
+    if t.is_empty() { return me.chars().count(); }
+
+    let mut dcol = Vec::from_fn(t.len() + 1, |x| x);
+    let mut t_last = 0;
+
+    for (i, sc) in me.chars().enumerate() {
+
+        let mut current = i;
+        dcol[0] = current + 1;
+
+        for (j, tc) in t.chars().enumerate() {
+
+            let next = dcol[j + 1];
+
+            if sc == tc {
+                dcol[j + 1] = current;
+            } else {
+                dcol[j + 1] = cmp::min(current, next);
+                dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
+            }
+
+            current = next;
+            t_last = j;
+        }
+    }
+
+    dcol[t_last + 1]
+}
+
+#[test]
+fn test_lev_distance() {
+    use std::char::{ from_u32, MAX };
+    // Test bytelength agnosticity
+    for c in range(0u32, MAX as u32)
+             .filter_map(|i| from_u32(i))
+             .map(|i| String::from_char(1, i)) {
+        assert_eq!(lev_distance(c[], c[]), 0);
+    }
+
+    let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
+    let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
+    let c = "Mary häd ä little lämb\n\nLittle lämb\n";
+    assert_eq!(lev_distance(a, b), 1);
+    assert_eq!(lev_distance(b, a), 1);
+    assert_eq!(lev_distance(a, c), 2);
+    assert_eq!(lev_distance(c, a), 2);
+    assert_eq!(lev_distance(b, c), 1);
+    assert_eq!(lev_distance(c, b), 1);
+}
diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs
index b0124977c9f1b..a02004e6d18a3 100644
--- a/src/librustc/util/ppaux.rs
+++ b/src/librustc/util/ppaux.rs
@@ -55,12 +55,12 @@ pub fn note_and_explain_region(cx: &ctxt,
         (ref str, Some(span)) => {
             cx.sess.span_note(
                 span,
-                format!("{}{}{}", prefix, *str, suffix).as_slice());
+                format!("{}{}{}", prefix, *str, suffix)[]);
             Some(span)
         }
         (ref str, None) => {
             cx.sess.note(
-                format!("{}{}{}", prefix, *str, suffix).as_slice());
+                format!("{}{}{}", prefix, *str, suffix)[]);
             None
         }
     }
@@ -93,8 +93,9 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
         ast::ExprMethodCall(..) => {
             explain_span(cx, "method call", expr.span)
         },
-        ast::ExprMatch(_, _, ast::MatchIfLetDesugar) => explain_span(cx, "if let", expr.span),
-        ast::ExprMatch(_, _, ast::MatchWhileLetDesugar) => {
+        ast::ExprMatch(_, _, ast::MatchSource::IfLetDesugar { .. }) =>
+            explain_span(cx, "if let", expr.span),
+        ast::ExprMatch(_, _, ast::MatchSource::WhileLetDesugar) => {
             explain_span(cx, "while let", expr.span)
         },
         ast::ExprMatch(..)
=> explain_span(cx, "match", expr.span), @@ -253,12 +254,14 @@ pub fn vec_map_to_string(ts: &[T], f: F) -> String where pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { fn bare_fn_to_string<'tcx>(cx: &ctxt<'tcx>, + opt_def_id: Option, unsafety: ast::Unsafety, abi: abi::Abi, ident: Option, sig: &ty::PolyFnSig<'tcx>) -> String { let mut s = String::new(); + match unsafety { ast::Unsafety::Normal => {} ast::Unsafety::Unsafe => { @@ -268,7 +271,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { }; if abi != abi::Rust { - s.push_str(format!("extern {} ", abi.to_string()).as_slice()); + s.push_str(format!("extern {} ", abi.to_string())[]); }; s.push_str("fn"); @@ -283,6 +286,16 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { push_sig_to_string(cx, &mut s, '(', ')', sig, ""); + match opt_def_id { + Some(def_id) => { + s.push_str(" {"); + let path_str = ty::item_path_str(cx, def_id); + s.push_str(path_str[]); + s.push_str("}"); + } + None => { } + } + s } @@ -292,7 +305,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { match cty.store { ty::UniqTraitStore => {} ty::RegionTraitStore(region, _) => { - s.push_str(region_to_string(cx, "", true, region).as_slice()); + s.push_str(region_to_string(cx, "", true, region)[]); } } @@ -311,7 +324,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { assert_eq!(cty.onceness, ast::Once); s.push_str("proc"); push_sig_to_string(cx, &mut s, '(', ')', &cty.sig, - bounds_str.as_slice()); + bounds_str[]); } ty::RegionTraitStore(..) => { match cty.onceness { @@ -319,7 +332,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ast::Once => s.push_str("once ") } push_sig_to_string(cx, &mut s, '|', '|', &cty.sig, - bounds_str.as_slice()); + bounds_str[]); } } @@ -352,7 +365,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ty::FnConverging(t) => { if !ty::type_is_nil(t) { s.push_str(" -> "); - s.push_str(ty_to_string(cx, t).as_slice()); + s.push_str(ty_to_string(cx, t)[]); } } ty::FnDiverging => { @@ -389,7 +402,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { } ty_rptr(r, ref tm) => { let mut buf = region_ptr_to_string(cx, r); - buf.push_str(mt_to_string(cx, tm).as_slice()); + buf.push_str(mt_to_string(cx, tm)[]); buf } ty_open(typ) => @@ -399,7 +412,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { .iter() .map(|elem| ty_to_string(cx, *elem)) .collect::>(); - match strs.as_slice() { + match strs[] { [ref string] => format!("({},)", string), strs => format!("({})", strs.connect(", ")) } @@ -407,8 +420,8 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ty_closure(ref f) => { closure_to_string(cx, &**f) } - ty_bare_fn(ref f) => { - bare_fn_to_string(cx, f.unsafety, f.abi, None, &f.sig) + ty_bare_fn(opt_def_id, ref f) => { + bare_fn_to_string(cx, opt_def_id, f.unsafety, f.abi, None, &f.sig) } ty_infer(infer_ty) => infer_ty_to_string(cx, infer_ty), ty_err => "[type error]".to_string(), @@ -452,7 +465,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ty_vec(t, sz) => { let inner_str = ty_to_string(cx, t); match sz { - Some(n) => format!("[{}, ..{}]", inner_str, n), + Some(n) => format!("[{}; {}]", inner_str, n), None => format!("[{}]", inner_str), } } @@ -532,7 +545,13 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, if 
cx.lang_items.fn_trait_kind(did).is_some() { format!("{}({}){}", base, - strs[0][1 .. strs[0].len() - (strs[0].ends_with(",)") as uint+1)], + if strs[0].starts_with("(") && strs[0].ends_with(",)") { + strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)' + } else if strs[0].starts_with("(") && strs[0].ends_with(")") { + strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')' + } else { + strs[0][] + }, if &*strs[1] == "()" { String::new() } else { format!(" -> {}", strs[1]) }) } else if strs.len() > 0 { format!("{}<{}>", base, strs.connect(", ")) @@ -544,7 +563,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String { let mut s = typ.repr(cx).to_string(); if s.len() >= 32u { - s = s.slice(0u, 32u).to_string(); + s = s[0u..32u].to_string(); } return s; } @@ -609,7 +628,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] { impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self.as_slice()) + repr_vec(tcx, self[]) } } @@ -617,7 +636,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { // autoderef cannot convert the &[T] handler impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self.as_slice()) + repr_vec(tcx, self[]) } } diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs index 3a4510703166c..0bd4265e487a6 100644 --- a/src/librustc_back/archive.rs +++ b/src/librustc_back/archive.rs @@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, args: &str, cwd: Option<&Path>, paths: &[&Path]) -> ProcessOutput { let ar = match *maybe_ar_prog { - Some(ref ar) => ar.as_slice(), + Some(ref ar) => ar[], None => "ar" }; let mut cmd = Command::new(ar); @@ -75,22 +75,22 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, if !o.status.success() { handler.err(format!("{} failed with: {}", cmd, - o.status).as_slice()); + o.status)[]); handler.note(format!("stdout ---\n{}", str::from_utf8(o.output - .as_slice()).unwrap()) - .as_slice()); + []).unwrap()) + []); handler.note(format!("stderr ---\n{}", str::from_utf8(o.error - .as_slice()).unwrap()) - .as_slice()); + []).unwrap()) + []); handler.abort_if_errors(); } o }, Err(e) => { - handler.err(format!("could not exec `{}`: {}", ar.as_slice(), - e).as_slice()); + handler.err(format!("could not exec `{}`: {}", ar[], + e)[]); handler.abort_if_errors(); panic!("rustc::back::archive::run_ar() should not reach this point"); } @@ -106,16 +106,16 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str, for path in search_paths.iter() { debug!("looking for {} inside {}", name, path.display()); - let test = path.join(oslibname.as_slice()); + let test = path.join(oslibname[]); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(unixlibname.as_slice()); + let test = path.join(unixlibname[]); if test.exists() { return test } } } handler.fatal(format!("could not find native static library `{}`, \ perhaps an -L flag is missing?", - name).as_slice()); + name)[]); } impl<'a> Archive<'a> { @@ -147,7 +147,7 @@ impl<'a> Archive<'a> { /// Lists all files in an archive pub fn files(&self) -> Vec { let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]); - let output = str::from_utf8(output.output.as_slice()).unwrap(); + let output = str::from_utf8(output.output[]).unwrap(); // use lines_any because windows delimits output with `\r\n` instead of // just `\n` output.lines_any().map(|s| 
s.to_string()).collect() @@ -179,9 +179,9 @@ impl<'a> ArchiveBuilder<'a> { /// search in the relevant locations for a library named `name`. pub fn add_native_library(&mut self, name: &str) -> io::IoResult<()> { let location = find_library(name, - self.archive.slib_prefix.as_slice(), - self.archive.slib_suffix.as_slice(), - self.archive.lib_search_paths.as_slice(), + self.archive.slib_prefix[], + self.archive.slib_suffix[], + self.archive.lib_search_paths[], self.archive.handler); self.add_archive(&location, name, |_| false) } @@ -197,12 +197,12 @@ impl<'a> ArchiveBuilder<'a> { // as simple comparison is not enough - there // might be also an extra name suffix let obj_start = format!("{}", name); - let obj_start = obj_start.as_slice(); + let obj_start = obj_start[]; // Ignoring all bytecode files, no matter of // name let bc_ext = ".bytecode.deflate"; - self.add_archive(rlib, name.as_slice(), |fname: &str| { + self.add_archive(rlib, name[], |fname: &str| { let skip_obj = lto && fname.starts_with(obj_start) && fname.ends_with(".o"); skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME @@ -239,7 +239,7 @@ impl<'a> ArchiveBuilder<'a> { // allow running `ar s file.a` to update symbols only. if self.should_update_symbols { run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "s", Some(self.work_dir.path()), args.as_slice()); + "s", Some(self.work_dir.path()), args[]); } return self.archive; } @@ -259,7 +259,7 @@ impl<'a> ArchiveBuilder<'a> { // Add the archive members seen so far, without updating the // symbol table (`S`). run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "cruS", Some(self.work_dir.path()), args.as_slice()); + "cruS", Some(self.work_dir.path()), args[]); args.clear(); args.push(&abs_dst); @@ -274,7 +274,7 @@ impl<'a> ArchiveBuilder<'a> { // necessary. 
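// Editor's note (illustrative, not part of the patch): the flag strings passed
// to run_ar in this builder ("cruS", "crus", "s") correspond to ordinary `ar`
// invocations, roughly:
//
//     ar cruS libfoo.a a.o b.o   # add/replace members, skip the symbol table
//     ar s    libfoo.a           # build the symbol index in one final pass
//
// Deferring the lowercase `s` step is what lets members be added in batches
// without re-indexing the archive each time.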
let flags = if self.should_update_symbols { "crus" } else { "cruS" }; run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - flags, Some(self.work_dir.path()), args.as_slice()); + flags, Some(self.work_dir.path()), args[]); self.archive } @@ -316,7 +316,7 @@ impl<'a> ArchiveBuilder<'a> { } else { filename }; - let new_filename = self.work_dir.path().join(filename.as_slice()); + let new_filename = self.work_dir.path().join(filename[]); try!(fs::rename(file, &new_filename)); self.members.push(Path::new(filename)); } diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs index 1f8549098d949..1056ac928e687 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_back/rpath.rs @@ -44,15 +44,15 @@ pub fn get_rpath_flags(config: RPathConfig) -> Vec where l.map(|p| p.clone()) }).collect::>(); - let rpaths = get_rpaths(config, libs.as_slice()); - flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice()); + let rpaths = get_rpaths(config, libs[]); + flags.push_all(rpaths_to_flags(rpaths[])[]); flags } fn rpaths_to_flags(rpaths: &[String]) -> Vec { let mut ret = Vec::new(); for rpath in rpaths.iter() { - ret.push(format!("-Wl,-rpath,{}", (*rpath).as_slice())); + ret.push(format!("-Wl,-rpath,{}", (*rpath)[])); } return ret; } @@ -82,14 +82,14 @@ fn get_rpaths(mut config: RPathConfig, libs: &[Path]) -> Vec } } - log_rpaths("relative", rel_rpaths.as_slice()); - log_rpaths("fallback", fallback_rpaths.as_slice()); + log_rpaths("relative", rel_rpaths[]); + log_rpaths("fallback", fallback_rpaths[]); let mut rpaths = rel_rpaths; - rpaths.push_all(fallback_rpaths.as_slice()); + rpaths.push_all(fallback_rpaths[]); // Remove duplicates - let rpaths = minimize_rpaths(rpaths.as_slice()); + let rpaths = minimize_rpaths(rpaths[]); return rpaths; } @@ -140,7 +140,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths.iter() { - if set.insert(rpath.as_slice()) { + if set.insert(rpath[]) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs index 98fa659ba55ea..d40c9ee8af6af 100644 --- a/src/librustc_back/svh.rs +++ b/src/librustc_back/svh.rs @@ -65,7 +65,7 @@ impl Svh { } pub fn as_str<'a>(&'a self) -> &'a str { - self.hash.as_slice() + self.hash[] } pub fn calculate(metadata: &Vec, krate: &ast::Crate) -> Svh { @@ -358,7 +358,7 @@ mod svh_visitor { fn macro_name(macro: &Mac) -> token::InternedString { match ¯o.node { &MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => { - let s = path.segments.as_slice(); + let s = path.segments[]; assert_eq!(s.len(), 1); content(s[0].identifier) } diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index d12cb356e3faa..99a25bebf40ab 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -224,7 +224,7 @@ impl Target { Some(val) => val, None => handler.fatal((format!("Field {} in target specification is required", name)) - .as_slice()) + []) } }; @@ -365,7 +365,7 @@ impl Target { let target_path = os::getenv("RUST_TARGET_PATH").unwrap_or(String::new()); - let paths = os::split_paths(target_path.as_slice()); + let paths = os::split_paths(target_path[]); // FIXME 16351: add a sane default search path? 
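// Editor's note (not part of the patch): `os::split_paths` splits on the
// platform's PATH separator (':' on Unix, ';' on Windows), so for example
//     RUST_TARGET_PATH=/opt/targets:/home/me/targets
// gives the loop below two directories to probe for a matching target
// specification.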
for dir in paths.iter() { diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index 3bf817b42b06d..568bb023b68a2 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -469,7 +469,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("cannot borrow `{}`{} as mutable \ more than once at a time", - nl, new_loan_msg).as_slice()) + nl, new_loan_msg)[]) } (ty::UniqueImmBorrow, _) => { @@ -477,7 +477,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("closure requires unique access to `{}` \ but {} is already borrowed{}", - nl, ol_pronoun, old_loan_msg).as_slice()); + nl, ol_pronoun, old_loan_msg)[]); } (_, ty::UniqueImmBorrow) => { @@ -485,7 +485,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("cannot borrow `{}`{} as {} because \ previous closure requires unique access", - nl, new_loan_msg, new_loan.kind.to_user_str()).as_slice()); + nl, new_loan_msg, new_loan.kind.to_user_str())[]); } (_, _) => { @@ -498,7 +498,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.kind.to_user_str(), ol_pronoun, old_loan.kind.to_user_str(), - old_loan_msg).as_slice()); + old_loan_msg)[]); } } @@ -507,7 +507,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_note( span, format!("borrow occurs due to use of `{}` in closure", - nl).as_slice()); + nl)[]); } _ => { } } @@ -556,7 +556,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_note( old_loan.span, - format!("{}; {}", borrow_summary, rule_summary).as_slice()); + format!("{}; {}", borrow_summary, rule_summary)[]); let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id()); self.bccx.span_end_note(old_loan_span, @@ -626,13 +626,13 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot use `{}` because it was mutably borrowed", - self.bccx.loan_path_to_string(copy_path).as_slice()) - .as_slice()); + self.bccx.loan_path_to_string(copy_path)[]) + []); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path).as_slice()) - .as_slice()); + self.bccx.loan_path_to_string(&*loan_path)[]) + []); } } } @@ -651,20 +651,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { let err_message = match move_kind { move_data::Captured => format!("cannot move `{}` into closure because it is borrowed", - self.bccx.loan_path_to_string(move_path).as_slice()), + self.bccx.loan_path_to_string(move_path)[]), move_data::Declared | move_data::MoveExpr | move_data::MovePat => format!("cannot move out of `{}` because it is borrowed", - self.bccx.loan_path_to_string(move_path).as_slice()) + self.bccx.loan_path_to_string(move_path)[]) }; - self.bccx.span_err(span, err_message.as_slice()); + self.bccx.span_err(span, err_message[]); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path).as_slice()) - .as_slice()); + self.bccx.loan_path_to_string(&*loan_path)[]) + []); } } } @@ -814,7 +814,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( assignment_span, format!("cannot assign to {}", - self.bccx.cmt_to_string(&*assignee_cmt)).as_slice()); + self.bccx.cmt_to_string(&*assignee_cmt))[]); self.bccx.span_help( self.tcx().map.span(upvar_id.closure_expr_id), "consider changing this closure to take self by mutable reference"); @@ -823,7 +823,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { assignment_span, format!("cannot assign to {} {}", 
assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt)).as_slice()); + self.bccx.cmt_to_string(&*assignee_cmt))[]); } } _ => match opt_loan_path(&assignee_cmt) { @@ -833,14 +833,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("cannot assign to {} {} `{}`", assignee_cmt.mutbl.to_user_str(), self.bccx.cmt_to_string(&*assignee_cmt), - self.bccx.loan_path_to_string(&*lp)).as_slice()); + self.bccx.loan_path_to_string(&*lp))[]); } None => { self.bccx.span_err( assignment_span, format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt)).as_slice()); + self.bccx.cmt_to_string(&*assignee_cmt))[]); } } } @@ -960,10 +960,10 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot assign to `{}` because it is borrowed", - self.bccx.loan_path_to_string(loan_path)).as_slice()); + self.bccx.loan_path_to_string(loan_path))[]); self.bccx.span_note( loan.span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(loan_path)).as_slice()); + self.bccx.loan_path_to_string(loan_path))[]); } } diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index 25ed518255540..dbbc52cf36235 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -124,12 +124,12 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, let attrs : &[ast::Attribute]; attrs = match tcx.map.find(id) { Some(ast_map::NodeItem(ref item)) => - item.attrs.as_slice(), + item.attrs[], Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) => - m.attrs.as_slice(), + m.attrs[], Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) => - m.attrs.as_slice(), - _ => [].as_slice(), + m.attrs[], + _ => [][], }; let span_err = @@ -145,7 +145,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, mpi) in vec_rc.iter().enumerate() { let render = || this.path_loan_path(*mpi).user_string(tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).as_slice()); + tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -157,7 +157,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, f) in vec_rc.iter().enumerate() { let render = || f.loan_path_user_string(this, tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).as_slice()); + tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -199,11 +199,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { // First, filter out duplicates moved.sort(); moved.dedup(); - debug!("fragments 1 moved: {}", path_lps(moved.as_slice())); + debug!("fragments 1 moved: {}", path_lps(moved[])); assigned.sort(); assigned.dedup(); - debug!("fragments 1 assigned: {}", path_lps(assigned.as_slice())); + debug!("fragments 1 assigned: {}", path_lps(assigned[])); // Second, build parents from the moved and assigned. 
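// Editor's illustration (not part of the patch, and only a rough sketch of the
// intent): after moving `p.x` out of a two-field struct `p`, the fixup steps
// in this function are expected to settle on something like
//     parents: p          (a prefix of a moved path)
//     moved:   p.x        (leaf fragments only, once parents are filtered out)
//     unmoved: p.y        (the sibling left behind by the partial move)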
for m in moved.iter() { @@ -223,14 +223,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { parents.sort(); parents.dedup(); - debug!("fragments 2 parents: {}", path_lps(parents.as_slice())); + debug!("fragments 2 parents: {}", path_lps(parents[])); // Third, filter the moved and assigned fragments down to just the non-parents - moved.retain(|f| non_member(*f, parents.as_slice())); - debug!("fragments 3 moved: {}", path_lps(moved.as_slice())); + moved.retain(|f| non_member(*f, parents[])); + debug!("fragments 3 moved: {}", path_lps(moved[])); - assigned.retain(|f| non_member(*f, parents.as_slice())); - debug!("fragments 3 assigned: {}", path_lps(assigned.as_slice())); + assigned.retain(|f| non_member(*f, parents[])); + debug!("fragments 3 assigned: {}", path_lps(assigned[])); // Fourth, build the leftover from the moved, assigned, and parents. for m in moved.iter() { @@ -248,16 +248,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { unmoved.sort(); unmoved.dedup(); - debug!("fragments 4 unmoved: {}", frag_lps(unmoved.as_slice())); + debug!("fragments 4 unmoved: {}", frag_lps(unmoved[])); // Fifth, filter the leftover fragments down to its core. unmoved.retain(|f| match *f { AllButOneFrom(_) => true, - Just(mpi) => non_member(mpi, parents.as_slice()) && - non_member(mpi, moved.as_slice()) && - non_member(mpi, assigned.as_slice()) + Just(mpi) => non_member(mpi, parents[]) && + non_member(mpi, moved[]) && + non_member(mpi, assigned[]) }); - debug!("fragments 5 unmoved: {}", frag_lps(unmoved.as_slice())); + debug!("fragments 5 unmoved: {}", frag_lps(unmoved[])); // Swap contents back in. fragments.unmoved_fragments = unmoved; @@ -434,7 +434,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, let msg = format!("type {} ({}) is not fragmentable", parent_ty.repr(tcx), sty_and_variant_info); let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id)); - tcx.sess.opt_span_bug(opt_span, msg.as_slice()) + tcx.sess.opt_span_bug(opt_span, msg[]) } } } diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 08d12f8282bdf..d7f50ccc6ba3c 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -310,7 +310,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { self.tcx().sess.span_bug( cmt.span, format!("invalid borrow lifetime: {}", - loan_region).as_slice()); + loan_region)[]); } }; debug!("loan_scope = {}", loan_scope); diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index fbe78152a609e..73b345a70af46 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -120,7 +120,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, bccx.span_err( move_from.span, format!("cannot move out of {}", - bccx.cmt_to_string(&*move_from)).as_slice()); + bccx.cmt_to_string(&*move_from))[]); } mc::cat_downcast(ref b, _) | @@ -132,7 +132,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, move_from.span, format!("cannot move out of type `{}`, \ which defines the `Drop` trait", - b.ty.user_string(bccx.tcx)).as_slice()); + b.ty.user_string(bccx.tcx))[]); }, _ => panic!("this path should not cause illegal move") } @@ -155,10 +155,10 @@ fn note_move_destination(bccx: &BorrowckCtxt, format!("to prevent the move, \ 
use `ref {0}` or `ref mut {0}` to capture value by \ reference", - pat_name).as_slice()); + pat_name)[]); } else { bccx.span_note(move_to_span, format!("and here (use `ref {0}` or `ref mut {0}`)", - pat_name).as_slice()); + pat_name)[]); } } diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 9be87b533f296..a13001b796850 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -146,7 +146,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt, check_loans::check_loans(this, &loan_dfcx, flowed_moves, - all_loans.as_slice(), + all_loans[], id, decl, body); @@ -527,7 +527,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { pub fn report(&self, err: BckError<'tcx>) { self.span_err( err.span, - self.bckerr_to_string(&err).as_slice()); + self.bckerr_to_string(&err)[]); self.note_and_explain_bckerr(err); } @@ -549,7 +549,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { use_span, format!("{} of possibly uninitialized variable: `{}`", verb, - self.loan_path_to_string(lp)).as_slice()); + self.loan_path_to_string(lp))[]); (self.loan_path_to_string(moved_lp), String::new()) } @@ -591,7 +591,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { format!("{} of {}moved value: `{}`", verb, msg, - nl).as_slice()); + nl)[]); (ol, moved_lp_msg) } }; @@ -610,7 +610,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.bug(format!("MoveExpr({}) maps to \ {}, not Expr", the_move.id, - r).as_slice()) + r)[]) } }; let (suggestion, _) = move_suggestion(self.tcx, param_env, expr_ty, @@ -621,7 +621,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion).as_slice()); + suggestion)[]); } move_data::MovePat => { @@ -632,7 +632,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { which is moved by default", ol, moved_lp_msg, - pat_ty.user_string(self.tcx)).as_slice()); + pat_ty.user_string(self.tcx))[]); self.tcx.sess.span_help(span, "use `ref` to override"); } @@ -648,7 +648,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.bug(format!("Captured({}) maps to \ {}, not Expr", the_move.id, - r).as_slice()) + r)[]) } }; let (suggestion, help) = move_suggestion(self.tcx, @@ -663,7 +663,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion).as_slice()); + suggestion)[]); self.tcx.sess.span_help(expr_span, help); } } @@ -696,7 +696,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("re-assignment of immutable variable `{}`", - self.loan_path_to_string(lp)).as_slice()); + self.loan_path_to_string(lp))[]); self.tcx.sess.span_note(assign.span, "prior assignment occurs here"); } @@ -822,12 +822,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("{} in an aliasable location", - prefix).as_slice()); + prefix)[]); } mc::AliasableClosure(id) => { self.tcx.sess.span_err(span, format!("{} in a captured outer \ - variable in an `Fn` closure", prefix).as_slice()); + variable in an `Fn` closure", prefix)[]); span_help!(self.tcx.sess, self.tcx.map.span(id), "consider changing this closure to take self by mutable reference"); } @@ -835,12 +835,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { mc::AliasableStaticMut(..) 
=> { self.tcx.sess.span_err( span, - format!("{} in a static location", prefix).as_slice()); + format!("{} in a static location", prefix)[]); } mc::AliasableBorrowed => { self.tcx.sess.span_err( span, - format!("{} in a `&` reference", prefix).as_slice()); + format!("{} in a `&` reference", prefix)[]); } } @@ -908,12 +908,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { note_and_explain_region( self.tcx, format!("{} would have to be valid for ", - descr).as_slice(), + descr)[], loan_scope, "..."); note_and_explain_region( self.tcx, - format!("...but {} is only valid for ", descr).as_slice(), + format!("...but {} is only valid for ", descr)[], ptr_scope, ""); } @@ -933,7 +933,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { out.push('('); self.append_loan_path_to_string(&**lp_base, out); out.push_str(DOWNCAST_PRINTED_OPERATOR); - out.push_str(ty::item_path_str(self.tcx, variant_def_id).as_slice()); + out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]); out.push(')'); } @@ -947,7 +947,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } mc::PositionalField(idx) => { out.push('.'); - out.push_str(idx.to_string().as_slice()); + out.push_str(idx.to_string()[]); } } } @@ -979,7 +979,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { out.push('('); self.append_autoderefd_loan_path_to_string(&**lp_base, out); out.push(':'); - out.push_str(ty::item_path_str(self.tcx, variant_def_id).as_slice()); + out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]); out.push(')'); } diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index d033fd808aa40..5d2faa52f1ad0 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -14,8 +14,6 @@ pub use self::MoveKind::*; use borrowck::*; -use borrowck::LoanPathKind::{LpVar, LpUpvar, LpDowncast, LpExtend}; -use borrowck::LoanPathElem::{LpInterior}; use rustc::middle::cfg; use rustc::middle::dataflow::DataFlowContext; use rustc::middle::dataflow::BitwiseOperator; diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index 3427be1443b3c..e2813c8e9882a 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -59,7 +59,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { if seen_one { sets.push_str(" "); } else { seen_one = true; } sets.push_str(variant.short_name()); sets.push_str(": "); - sets.push_str(self.dataflow_for_variant(e, n, variant).as_slice()); + sets.push_str(self.dataflow_for_variant(e, n, variant)[]); } sets } @@ -88,7 +88,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp); - set.push_str(loan_str.as_slice()); + set.push_str(loan_str[]); saw_some = true; true }); diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs index ffc5a3919b60f..e71e9e5dfea1b 100644 --- a/src/librustc_borrowck/lib.rs +++ b/src/librustc_borrowck/lib.rs @@ -16,7 +16,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(default_type_params, globs, import_shadowing, macro_rules, phase, quote)] +#![feature(default_type_params, globs, macro_rules, phase, quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] #![feature(unboxed_closures)] diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 9ed4f46c16884..20bb9c2f4fd1c 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ 
-20,6 +20,7 @@ use rustc::plugin::registry::Registry; use rustc::plugin; use rustc::util::common::time; use rustc_borrowck as borrowck; +use rustc_resolve as resolve; use rustc_trans::back::link; use rustc_trans::back::write; use rustc_trans::save; @@ -57,12 +58,12 @@ pub fn compile_input(sess: Session, let outputs = build_output_filenames(input, outdir, output, - krate.attrs.as_slice(), + krate.attrs[], &sess); - let id = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), + let id = link::find_crate_name(Some(&sess), krate.attrs[], input); let expanded_crate - = match phase_2_configure_and_expand(&sess, krate, id.as_slice(), + = match phase_2_configure_and_expand(&sess, krate, id[], addl_plugins) { None => return, Some(k) => k @@ -74,7 +75,7 @@ pub fn compile_input(sess: Session, let mut forest = ast_map::Forest::new(expanded_crate); let ast_map = assign_node_ids_and_map(&sess, &mut forest); - write_out_deps(&sess, input, &outputs, id.as_slice()); + write_out_deps(&sess, input, &outputs, id[]); if stop_after_phase_2(&sess) { return; } @@ -162,9 +163,9 @@ pub fn phase_2_configure_and_expand(sess: &Session, let time_passes = sess.time_passes(); *sess.crate_types.borrow_mut() = - collect_crate_types(sess, krate.attrs.as_slice()); + collect_crate_types(sess, krate.attrs[]); *sess.crate_metadata.borrow_mut() = - collect_crate_metadata(sess, krate.attrs.as_slice()); + collect_crate_metadata(sess, krate.attrs[]); time(time_passes, "gated feature checking", (), |_| { let (features, unknown_features) = @@ -256,8 +257,8 @@ pub fn phase_2_configure_and_expand(sess: &Session, if cfg!(windows) { _old_path = os::getenv("PATH").unwrap_or(_old_path); let mut new_path = sess.host_filesearch().get_dylib_search_paths(); - new_path.extend(os::split_paths(_old_path.as_slice()).into_iter()); - os::setenv("PATH", os::join_paths(new_path.as_slice()).unwrap()); + new_path.extend(os::split_paths(_old_path[]).into_iter()); + os::setenv("PATH", os::join_paths(new_path[]).unwrap()); } let cfg = syntax::ext::expand::ExpansionConfig { crate_name: crate_name.to_string(), @@ -341,17 +342,17 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session, let lang_items = time(time_passes, "language item collection", (), |_| middle::lang_items::collect_language_items(krate, &sess)); - let middle::resolve::CrateMap { + let resolve::CrateMap { def_map, freevars, capture_mode_map, - exp_map2, + export_map, trait_map, external_exports, last_private_map } = - time(time_passes, "resolution", (), |_| - middle::resolve::resolve_crate(&sess, &lang_items, krate)); + time(time_passes, "resolution", (), + |_| resolve::resolve_crate(&sess, &lang_items, krate)); // Discard MTWT tables that aren't required past resolution. 
syntax::ext::mtwt::clear_tables(); @@ -406,7 +407,7 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session, let maps = (external_exports, last_private_map); let (exported_items, public_items) = time(time_passes, "privacy checking", maps, |(a, b)| - middle::privacy::check_crate(&ty_cx, &exp_map2, a, b)); + middle::privacy::check_crate(&ty_cx, &export_map, a, b)); time(time_passes, "intrinsic checking", (), |_| middle::intrinsicck::check_crate(&ty_cx)); @@ -447,7 +448,7 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session, lint::check_crate(&ty_cx, &exported_items)); ty::CrateAnalysis { - exp_map2: exp_map2, + export_map: export_map, ty_cx: ty_cx, exported_items: exported_items, public_items: public_items, @@ -502,7 +503,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session, time(sess.time_passes(), "LLVM passes", (), |_| write::run_passes(sess, trans, - sess.opts.output_types.as_slice(), + sess.opts.output_types[], outputs)); } @@ -516,14 +517,14 @@ pub fn phase_6_link_output(sess: &Session, outputs: &OutputFilenames) { let old_path = os::getenv("PATH").unwrap_or_else(||String::new()); let mut new_path = sess.host_filesearch().get_tools_search_paths(); - new_path.extend(os::split_paths(old_path.as_slice()).into_iter()); - os::setenv("PATH", os::join_paths(new_path.as_slice()).unwrap()); + new_path.extend(os::split_paths(old_path[]).into_iter()); + os::setenv("PATH", os::join_paths(new_path[]).unwrap()); time(sess.time_passes(), "linking", (), |_| link::link_binary(sess, trans, outputs, - trans.link.crate_name.as_slice())); + trans.link.crate_name[])); os::setenv("PATH", old_path); } @@ -612,7 +613,7 @@ fn write_out_deps(sess: &Session, // write Makefile-compatible dependency rules let files: Vec = sess.codemap().files.borrow() .iter().filter(|fmap| fmap.is_real_file()) - .map(|fmap| escape_dep_filename(fmap.name.as_slice())) + .map(|fmap| escape_dep_filename(fmap.name[])) .collect(); let mut file = try!(io::File::create(&deps_filename)); for path in out_filenames.iter() { @@ -626,7 +627,7 @@ fn write_out_deps(sess: &Session, Ok(()) => {} Err(e) => { sess.fatal(format!("error writing dependencies to `{}`: {}", - deps_filename.display(), e).as_slice()); + deps_filename.display(), e)[]); } } } @@ -697,7 +698,7 @@ pub fn collect_crate_types(session: &Session, if !res { session.warn(format!("dropping unsupported crate type `{}` \ for target `{}`", - *crate_type, session.opts.target_triple).as_slice()); + *crate_type, session.opts.target_triple)[]); } res diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 120654678e96e..e2791aff14e49 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -22,7 +22,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(default_type_params, globs, import_shadowing, macro_rules, phase, quote)] +#![feature(default_type_params, globs, macro_rules, phase, quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] #![feature(unboxed_closures)] @@ -35,6 +35,7 @@ extern crate libc; extern crate rustc; extern crate rustc_back; extern crate rustc_borrowck; +extern crate rustc_resolve; extern crate rustc_trans; extern crate rustc_typeck; #[phase(plugin, link)] extern crate log; @@ -46,7 +47,7 @@ pub use syntax::diagnostic; use rustc_trans::back::link; use rustc::session::{config, Session, build_session}; -use rustc::session::config::Input; +use rustc::session::config::{Input, PrintRequest}; use rustc::lint::Lint; use 
rustc::lint; use rustc::metadata; @@ -54,6 +55,7 @@ use rustc::DIAGNOSTICS; use std::any::AnyRefExt; use std::io; +use std::iter::repeat; use std::os; use std::thread; @@ -87,12 +89,12 @@ fn run_compiler(args: &[String]) { let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS); match matches.opt_str("explain") { Some(ref code) => { - match descriptions.find_description(code.as_slice()) { + match descriptions.find_description(code[]) { Some(ref description) => { println!("{}", description); } None => { - early_error(format!("no extended information for {}", code).as_slice()); + early_error(format!("no extended information for {}", code)[]); } } return; @@ -101,6 +103,8 @@ fn run_compiler(args: &[String]) { } let sopts = config::build_session_options(&matches); + let odir = matches.opt_str("out-dir").map(|o| Path::new(o)); + let ofile = matches.opt_str("o").map(|o| Path::new(o)); let (input, input_file_path) = match matches.free.len() { 0u => { if sopts.describe_lints { @@ -109,17 +113,14 @@ fn run_compiler(args: &[String]) { describe_lints(&ls, false); return; } - let sess = build_session(sopts, None, descriptions); - if sess.debugging_opt(config::PRINT_SYSROOT) { - println!("{}", sess.sysroot().display()); + if print_crate_info(&sess, None, &odir, &ofile) { return; } - early_error("no input filename given"); } 1u => { - let ifile = matches.free[0].as_slice(); + let ifile = matches.free[0][]; if ifile == "-" { let contents = io::stdin().read_to_end().unwrap(); let src = String::from_utf8(contents).unwrap(); @@ -133,13 +134,25 @@ fn run_compiler(args: &[String]) { let sess = build_session(sopts, input_file_path, descriptions); let cfg = config::build_configuration(&sess); - let odir = matches.opt_str("out-dir").map(|o| Path::new(o)); - let ofile = matches.opt_str("o").map(|o| Path::new(o)); + if print_crate_info(&sess, Some(&input), &odir, &ofile) { + return + } let pretty = matches.opt_default("pretty", "normal").map(|a| { - pretty::parse_pretty(&sess, a.as_slice()) + // stable pretty-print variants only + pretty::parse_pretty(&sess, a.as_slice(), false) }); - match pretty { + let pretty = if pretty.is_none() && + sess.debugging_opt(config::UNSTABLE_OPTIONS) { + matches.opt_str("xpretty").map(|a| { + // extended with unstable pretty-print variants + pretty::parse_pretty(&sess, a.as_slice(), true) + }) + } else { + pretty + }; + + match pretty.into_iter().next() { Some((ppm, opt_uii)) => { pretty::pretty_print_input(sess, cfg, &input, ppm, opt_uii, ofile); return; @@ -161,10 +174,6 @@ fn run_compiler(args: &[String]) { return; } - if print_crate_info(&sess, &input, &odir, &ofile) { - return; - } - driver::compile_input(sess, cfg, &input, &odir, &ofile, None); } @@ -185,12 +194,8 @@ pub fn commit_date_str() -> Option<&'static str> { /// Prints version information and returns None on success or an error /// message on panic. 
-pub fn version(binary: &str, matches: &getopts::Matches) -> Option { - let verbose = match matches.opt_str("version").as_ref().map(|s| s.as_slice()) { - None => false, - Some("verbose") => true, - Some(s) => return Some(format!("Unrecognized argument: {}", s)) - }; +pub fn version(binary: &str, matches: &getopts::Matches) { + let verbose = matches.opt_present("verbose"); println!("{} {}", binary, option_env!("CFG_VERSION").unwrap_or("unknown version")); if verbose { @@ -201,18 +206,31 @@ pub fn version(binary: &str, matches: &getopts::Matches) -> Option { println!("host: {}", config::host_triple()); println!("release: {}", unw(release_str())); } - None } -fn usage() { +fn usage(verbose: bool, include_unstable_options: bool) { + let groups = if verbose { + config::rustc_optgroups() + } else { + config::rustc_short_optgroups() + }; + let groups : Vec<_> = groups.into_iter() + .filter(|x| include_unstable_options || x.is_stable()) + .map(|x|x.opt_group) + .collect(); let message = format!("Usage: rustc [OPTIONS] INPUT"); + let extra_help = if verbose { + "" + } else { + "\n --help -v Print the full set of options rustc accepts" + }; println!("{}\n\ Additional help: -C help Print codegen options -W help Print 'lint' options and default settings - -Z help Print internal options for debugging rustc\n", - getopts::usage(message.as_slice(), - config::optgroups().as_slice())); + -Z help Print internal options for debugging rustc{}\n", + getopts::usage(message.as_slice(), groups.as_slice()), + extra_help); } fn describe_lints(lint_store: &lint::LintStore, loaded_plugins: bool) { @@ -259,7 +277,8 @@ Available lint options: .map(|&s| s.name.width(true)) .max().unwrap_or(0); let padded = |x: &str| { - let mut s = " ".repeat(max_name_len - x.char_len()); + let mut s = repeat(" ").take(max_name_len - x.chars().count()) + .collect::(); s.push_str(x); s }; @@ -272,7 +291,7 @@ Available lint options: for lint in lints.into_iter() { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7} {}", - padded(name.as_slice()), lint.default_level.as_str(), lint.desc); + padded(name[]), lint.default_level.as_str(), lint.desc); } println!("\n"); }; @@ -285,7 +304,8 @@ Available lint options: .map(|&(s, _)| s.width(true)) .max().unwrap_or(0); let padded = |x: &str| { - let mut s = " ".repeat(max_name_len - x.char_len()); + let mut s = repeat(" ").take(max_name_len - x.chars().count()) + .collect::(); s.push_str(x); s }; @@ -301,7 +321,7 @@ Available lint options: let desc = to.into_iter().map(|x| x.as_str().replace("_", "-")) .collect::>().connect(", "); println!(" {} {}", - padded(name.as_slice()), desc); + padded(name[]), desc); } println!("\n"); }; @@ -360,20 +380,45 @@ pub fn handle_options(mut args: Vec) -> Option { let _binary = args.remove(0).unwrap(); if args.is_empty() { - usage(); + // user did not write `-v` nor `-Z unstable-options`, so do not + // include that extra information. + usage(false, false); return None; } let matches = - match getopts::getopts(args.as_slice(), config::optgroups().as_slice()) { + match getopts::getopts(args[], config::optgroups()[]) { Ok(m) => m, - Err(f) => { - early_error(f.to_string().as_slice()); + Err(f_stable_attempt) => { + // redo option parsing, including unstable options this time, + // in anticipation that the mishandled option was one of the + // unstable ones. 
+ let all_groups : Vec + = config::rustc_optgroups().into_iter().map(|x|x.opt_group).collect(); + match getopts::getopts(args.as_slice(), all_groups.as_slice()) { + Ok(m_unstable) => { + let r = m_unstable.opt_strs("Z"); + let include_unstable_options = r.iter().any(|x| *x == "unstable-options"); + if include_unstable_options { + m_unstable + } else { + early_error(f_stable_attempt.to_string().as_slice()); + } + } + Err(_) => { + // ignore the error from the unstable attempt; just + // pass the error we got from the first try. + early_error(f_stable_attempt.to_string().as_slice()); + } + } } }; + let r = matches.opt_strs("Z"); + let include_unstable_options = r.iter().any(|x| *x == "unstable-options"); + if matches.opt_present("h") || matches.opt_present("help") { - usage(); + usage(matches.opt_present("verbose"), include_unstable_options); return None; } @@ -397,49 +442,55 @@ pub fn handle_options(mut args: Vec) -> Option { } if matches.opt_present("version") { - match version("rustc", &matches) { - Some(err) => early_error(err.as_slice()), - None => return None - } + version("rustc", &matches); + return None; } Some(matches) } fn print_crate_info(sess: &Session, - input: &Input, + input: Option<&Input>, odir: &Option, ofile: &Option) -> bool { - let (crate_name, crate_file_name) = sess.opts.print_metas; - // these nasty nested conditions are to avoid doing extra work - if crate_name || crate_file_name { - let attrs = parse_crate_attrs(sess, input); - let t_outputs = driver::build_output_filenames(input, - odir, - ofile, - attrs.as_slice(), - sess); - let id = link::find_crate_name(Some(sess), attrs.as_slice(), input); - - if crate_name { - println!("{}", id); - } - if crate_file_name { - let crate_types = driver::collect_crate_types(sess, attrs.as_slice()); - let metadata = driver::collect_crate_metadata(sess, attrs.as_slice()); - *sess.crate_metadata.borrow_mut() = metadata; - for &style in crate_types.iter() { - let fname = link::filename_for_input(sess, style, id.as_slice(), - &t_outputs.with_extension("")); - println!("{}", fname.filename_display()); + if sess.opts.prints.len() == 0 { return false } + + let attrs = input.map(|input| parse_crate_attrs(sess, input)); + for req in sess.opts.prints.iter() { + match *req { + PrintRequest::Sysroot => println!("{}", sess.sysroot().display()), + PrintRequest::FileNames | + PrintRequest::CrateName => { + let input = match input { + Some(input) => input, + None => early_error("no input file provided"), + }; + let attrs = attrs.as_ref().unwrap().as_slice(); + let t_outputs = driver::build_output_filenames(input, + odir, + ofile, + attrs, + sess); + let id = link::find_crate_name(Some(sess), attrs.as_slice(), + input); + if *req == PrintRequest::CrateName { + println!("{}", id); + continue + } + let crate_types = driver::collect_crate_types(sess, attrs); + let metadata = driver::collect_crate_metadata(sess, attrs); + *sess.crate_metadata.borrow_mut() = metadata; + for &style in crate_types.iter() { + let fname = link::filename_for_input(sess, style, + id.as_slice(), + &t_outputs.with_extension("")); + println!("{}", fname.filename_display()); + } } } - - true - } else { - false } + return true; } fn parse_crate_attrs(sess: &Session, input: &Input) -> @@ -472,7 +523,7 @@ pub fn list_metadata(sess: &Session, path: &Path, /// The diagnostic emitter yielded to the procedure should be used for reporting /// errors of the compiler. 
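The `handle_options` change above parses the command line against the stable option set first and, only on failure, retries with the full set, keeping that second result when `-Z unstable-options` was actually passed. A small self-contained sketch of that fallback logic follows; `Matches` and the two parse functions are hypothetical stand-ins for the getopts option groups, not the real rustc code:

```rust
struct Matches {
    z_flags: Vec<String>,
}

fn collect_z(args: &[&str]) -> Vec<String> {
    args.iter()
        .filter_map(|a| a.strip_prefix("-Z").map(str::to_string))
        .collect()
}

// Stand-in for parsing with the stable option groups only.
fn parse_stable(args: &[&str]) -> Result<Matches, String> {
    if args.iter().any(|a| a.starts_with("--x")) {
        Err(format!("unrecognized option in {:?}", args))
    } else {
        Ok(Matches { z_flags: collect_z(args) })
    }
}

// Stand-in for parsing with every option group, stable or not.
fn parse_full(args: &[&str]) -> Result<Matches, String> {
    Ok(Matches { z_flags: collect_z(args) })
}

fn handle_options(args: &[&str]) -> Result<Matches, String> {
    match parse_stable(args) {
        Ok(m) => Ok(m),
        Err(stable_err) => match parse_full(args) {
            // Accept the retry only if the user opted into unstable options.
            Ok(m) if m.z_flags.iter().any(|z| z == "unstable-options") => Ok(m),
            // Otherwise report the error from the first (stable) attempt.
            _ => Err(stable_err),
        },
    }
}

fn main() {
    assert!(handle_options(&["--xpretty", "normal"]).is_err());
    assert!(handle_options(&["--xpretty", "normal", "-Zunstable-options"]).is_ok());
}
```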
pub fn monitor(f: F) { - static STACK_SIZE: uint = 32000000; // 32MB + static STACK_SIZE: uint = 8 * 1024 * 1024; // 8MB let (tx, rx) = channel(); let w = io::ChanWriter::new(tx); @@ -510,7 +561,7 @@ pub fn monitor(f: F) { "run with `RUST_BACKTRACE=1` for a backtrace".to_string(), ]; for note in xs.iter() { - emitter.emit(None, note.as_slice(), None, diagnostic::Note) + emitter.emit(None, note[], None, diagnostic::Note) } match r.read_to_string() { @@ -518,8 +569,7 @@ pub fn monitor(f: F) { Err(e) => { emitter.emit(None, format!("failed to read internal \ - stderr: {}", - e).as_slice(), + stderr: {}", e)[], None, diagnostic::Error) } diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 2eb9d2c67a7cb..9334c648a1f70 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -30,7 +30,10 @@ use rustc_borrowck::graphviz as borrowck_dot; use syntax::ast; use syntax::ast_map::{mod, blocks, NodePrinter}; +use syntax::codemap; +use syntax::fold::{mod, Folder}; use syntax::print::{pp, pprust}; +use syntax::ptr::P; use graphviz as dot; @@ -42,6 +45,7 @@ use arena::TypedArena; #[deriving(Copy, PartialEq, Show)] pub enum PpSourceMode { PpmNormal, + PpmEveryBodyLoops, PpmExpanded, PpmTyped, PpmIdentified, @@ -55,26 +59,36 @@ pub enum PpMode { PpmFlowGraph, } -pub fn parse_pretty(sess: &Session, name: &str) -> (PpMode, Option) { +pub fn parse_pretty(sess: &Session, + name: &str, + extended: bool) -> (PpMode, Option) { let mut split = name.splitn(1, '='); let first = split.next().unwrap(); let opt_second = split.next(); - let first = match first { - "normal" => PpmSource(PpmNormal), - "expanded" => PpmSource(PpmExpanded), - "typed" => PpmSource(PpmTyped), - "expanded,identified" => PpmSource(PpmExpandedIdentified), - "expanded,hygiene" => PpmSource(PpmExpandedHygiene), - "identified" => PpmSource(PpmIdentified), - "flowgraph" => PpmFlowGraph, + let first = match (first, extended) { + ("normal", _) => PpmSource(PpmNormal), + ("everybody_loops", true) => PpmSource(PpmEveryBodyLoops), + ("expanded", _) => PpmSource(PpmExpanded), + ("typed", _) => PpmSource(PpmTyped), + ("expanded,identified", _) => PpmSource(PpmExpandedIdentified), + ("expanded,hygiene", _) => PpmSource(PpmExpandedHygiene), + ("identified", _) => PpmSource(PpmIdentified), + ("flowgraph", true) => PpmFlowGraph, _ => { - sess.fatal(format!( - "argument to `pretty` must be one of `normal`, \ - `expanded`, `flowgraph=`, `typed`, `identified`, \ - or `expanded,identified`; got {}", name).as_slice()); + if extended { + sess.fatal(format!( + "argument to `xpretty` must be one of `normal`, \ + `expanded`, `flowgraph=`, `typed`, `identified`, \ + `expanded,identified`, or `everybody_loops`; got {}", name).as_slice()); + } else { + sess.fatal(format!( + "argument to `pretty` must be one of `normal`, \ + `expanded`, `typed`, `identified`, \ + or `expanded,identified`; got {}", name).as_slice()); + } } }; - let opt_second = opt_second.and_then::(from_str); + let opt_second = opt_second.and_then(|s| s.parse::()); (first, opt_second) } @@ -105,7 +119,7 @@ impl PpSourceMode { F: FnOnce(&PrinterSupport, B) -> A, { match *self { - PpmNormal | PpmExpanded => { + PpmNormal | PpmEveryBodyLoops | PpmExpanded => { let annotation = NoAnn { sess: sess, ast_map: ast_map }; f(&annotation, payload) } @@ -276,7 +290,7 @@ impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> { try!(pp::word(&mut s.s, ppaux::ty_to_string( tcx, - ty::expr_ty(tcx, expr)).as_slice())); + ty::expr_ty(tcx, expr))[])); s.pclose() } _ => Ok(()) 
@@ -311,7 +325,7 @@ pub enum UserIdentifiedItem { impl FromStr for UserIdentifiedItem { fn from_str(s: &str) -> Option { - from_str(s).map(ItemViaNode).or_else(|| { + s.parse().map(ItemViaNode).or_else(|| { let v : Vec<_> = s.split_str("::") .map(|x|x.to_string()) .collect(); @@ -322,7 +336,7 @@ impl FromStr for UserIdentifiedItem { enum NodesMatchingUII<'a, 'ast: 'a> { NodesMatchingDirect(option::IntoIter), - NodesMatchingSuffix(ast_map::NodesMatchingSuffix<'a, 'ast, String>), + NodesMatchingSuffix(ast_map::NodesMatchingSuffix<'a, 'ast>), } impl<'a, 'ast> Iterator for NodesMatchingUII<'a, 'ast> { @@ -348,7 +362,7 @@ impl UserIdentifiedItem { ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()), ItemViaPath(ref parts) => - NodesMatchingSuffix(map.nodes_matching_suffix(parts.as_slice())), + NodesMatchingSuffix(map.nodes_matching_suffix(parts[])), } } @@ -360,7 +374,7 @@ impl UserIdentifiedItem { user_option, self.reconstructed_input(), is_wrong_because); - sess.fatal(message.as_slice()) + sess.fatal(message[]) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ -384,6 +398,7 @@ impl UserIdentifiedItem { fn needs_ast_map(ppm: &PpMode, opt_uii: &Option) -> bool { match *ppm { PpmSource(PpmNormal) | + PpmSource(PpmEveryBodyLoops) | PpmSource(PpmIdentified) => opt_uii.is_some(), PpmSource(PpmExpanded) | @@ -397,6 +412,7 @@ fn needs_ast_map(ppm: &PpMode, opt_uii: &Option) -> bool { fn needs_expansion(ppm: &PpMode) -> bool { match *ppm { PpmSource(PpmNormal) | + PpmSource(PpmEveryBodyLoops) | PpmSource(PpmIdentified) => false, PpmSource(PpmExpanded) | @@ -407,6 +423,64 @@ fn needs_expansion(ppm: &PpMode) -> bool { } } +struct ReplaceBodyWithLoop { + within_static_or_const: bool, +} + +impl ReplaceBodyWithLoop { + fn new() -> ReplaceBodyWithLoop { + ReplaceBodyWithLoop { within_static_or_const: false } + } +} + +impl fold::Folder for ReplaceBodyWithLoop { + fn fold_item_underscore(&mut self, i: ast::Item_) -> ast::Item_ { + match i { + ast::ItemStatic(..) | ast::ItemConst(..) => { + self.within_static_or_const = true; + let ret = fold::noop_fold_item_underscore(i, self); + self.within_static_or_const = false; + return ret; + } + _ => { + fold::noop_fold_item_underscore(i, self) + } + } + } + + + fn fold_block(&mut self, b: P) -> P { + fn expr_to_block(rules: ast::BlockCheckMode, + e: Option>) -> P { + P(ast::Block { + expr: e, + view_items: vec![], stmts: vec![], rules: rules, + id: ast::DUMMY_NODE_ID, span: codemap::DUMMY_SP, + }) + } + + if !self.within_static_or_const { + + let empty_block = expr_to_block(ast::DefaultBlock, None); + let loop_expr = P(ast::Expr { + node: ast::ExprLoop(empty_block, None), + id: ast::DUMMY_NODE_ID, span: codemap::DUMMY_SP + }); + + expr_to_block(b.rules, Some(loop_expr)) + + } else { + fold::noop_fold_block(b, self) + } + } + + // in general the pretty printer processes unexpanded code, so + // we override the default `fold_mac` method which panics. 
+ fn fold_mac(&mut self, _macro: ast::Mac) -> ast::Mac { + fold::noop_fold_mac(_macro, self) + } +} + pub fn pretty_print_input(sess: Session, cfg: ast::CrateConfig, input: &Input, @@ -414,12 +488,20 @@ pub fn pretty_print_input(sess: Session, opt_uii: Option, ofile: Option) { let krate = driver::phase_1_parse_input(&sess, cfg, input); + + let krate = if let PpmSource(PpmEveryBodyLoops) = ppm { + let mut fold = ReplaceBodyWithLoop::new(); + fold.fold_crate(krate) + } else { + krate + }; + let id = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), input); let is_expanded = needs_expansion(&ppm); let compute_ast_map = needs_ast_map(&ppm, &opt_uii); let krate = if compute_ast_map { - match driver::phase_2_configure_and_expand(&sess, krate, id.as_slice(), None) { + match driver::phase_2_configure_and_expand(&sess, krate, id[], None) { None => return, Some(k) => k } @@ -438,7 +520,7 @@ pub fn pretty_print_input(sess: Session, }; let src_name = driver::source_name(input); - let src = sess.codemap().get_filemap(src_name.as_slice()) + let src = sess.codemap().get_filemap(src_name[]) .src.as_bytes().to_vec(); let mut rdr = MemReader::new(src); @@ -499,7 +581,7 @@ pub fn pretty_print_input(sess: Session, debug!("pretty printing flow graph for {}", opt_uii); let uii = opt_uii.unwrap_or_else(|| { sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or - unique path suffix (b::c::d)").as_slice()) + unique path suffix (b::c::d)")[]) }); let ast_map = ast_map.expect("--pretty flowgraph missing ast_map"); @@ -507,7 +589,7 @@ pub fn pretty_print_input(sess: Session, let node = ast_map.find(nodeid).unwrap_or_else(|| { sess.fatal(format!("--pretty flowgraph couldn't find id: {}", - nodeid).as_slice()) + nodeid)[]) }); let code = blocks::Code::from_node(node); @@ -526,8 +608,8 @@ pub fn pretty_print_input(sess: Session, // point to what was found, if there's an // accessible span. 
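The `ReplaceBodyWithLoop` folder introduced above implements the new `everybody_loops` pretty-print mode: every block is replaced by one containing a bare `loop { }`, except inside `static`/`const` items, whose initializers must be kept. A toy model of that rewrite over an invented mini-AST (not the real `syntax::fold` API) looks roughly like this:

```rust
#[derive(Debug, Clone, PartialEq)]
enum Item {
    // A function with a body made of statements.
    Fn { name: String, body: Vec<String> },
    // A constant; its initializer must survive the rewrite.
    Const { name: String, init: String },
}

fn replace_body_with_loop(item: Item) -> Item {
    match item {
        // Function bodies become a single infinite loop, as in PpmEveryBodyLoops.
        Item::Fn { name, .. } => Item::Fn {
            name,
            body: vec!["loop { }".to_string()],
        },
        // Statics and constants are left untouched.
        c @ Item::Const { .. } => c,
    }
}

fn main() {
    let f = Item::Fn {
        name: "compute".into(),
        body: vec!["let x = heavy();".into(), "x + 1".into()],
    };
    let c = Item::Const { name: "N".into(), init: "3".into() };

    assert_eq!(
        replace_body_with_loop(f),
        Item::Fn { name: "compute".into(), body: vec!["loop { }".into()] }
    );
    assert_eq!(replace_body_with_loop(c.clone()), c);
}
```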
match ast_map.opt_span(nodeid) { - Some(sp) => sess.span_fatal(sp, message.as_slice()), - None => sess.fatal(message.as_slice()) + Some(sp) => sess.span_fatal(sp, message[]), + None => sess.fatal(message[]) } } } @@ -587,7 +669,7 @@ fn print_flowgraph(variants: Vec, let m = "graphviz::render failed"; io::IoError { detail: Some(match orig_detail { - None => m.into_string(), + None => m.to_string(), Some(d) => format!("{}: {}", m, d) }), ..ioerr diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index b2c661cc58aa4..526bbca8d70dd 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -13,9 +13,9 @@ use diagnostic; use diagnostic::Emitter; use driver; +use rustc_resolve as resolve; use rustc_typeck::middle::lang_items; use rustc_typeck::middle::region::{mod, CodeExtent}; -use rustc_typeck::middle::resolve; use rustc_typeck::middle::resolve_lifetime; use rustc_typeck::middle::stability; use rustc_typeck::middle::subst; @@ -253,7 +253,18 @@ impl<'a, 'tcx> Env<'a, 'tcx> { output_ty: Ty<'tcx>) -> Ty<'tcx> { - ty::mk_ctor_fn(self.infcx.tcx, input_tys, output_ty) + let input_args = input_tys.iter().map(|ty| *ty).collect(); + ty::mk_bare_fn(self.infcx.tcx, + None, + ty::BareFnTy { + unsafety: ast::Unsafety::Normal, + abi: abi::Rust, + sig: ty::Binder(ty::FnSig { + inputs: input_args, + output: ty::FnConverging(output_ty), + variadic: false + }) + }) } pub fn t_nil(&self) -> Ty<'tcx> { diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs new file mode 100644 index 0000000000000..39cdf6fc8f318 --- /dev/null +++ b/src/librustc_resolve/check_unused.rs @@ -0,0 +1,161 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// +// Unused import checking +// +// Although this is mostly a lint pass, it lives in here because it depends on +// resolve data structures and because it finalises the privacy information for +// `use` directives. +// + +use Resolver; +use Namespace::{TypeNS, ValueNS}; + +use rustc::lint; +use rustc::middle::privacy::{DependsOn, LastImport, Used, Unused}; +use syntax::ast; +use syntax::ast::{ViewItem, ViewItemExternCrate, ViewItemUse}; +use syntax::ast::{ViewPathGlob, ViewPathList, ViewPathSimple}; +use syntax::codemap::{Span, DUMMY_SP}; +use syntax::visit::{mod, Visitor}; + +struct UnusedImportCheckVisitor<'a, 'b:'a> { + resolver: &'a mut Resolver<'b> +} + +// Deref and DerefMut impls allow treating UnusedImportCheckVisitor as Resolver. +impl<'a, 'b> Deref> for UnusedImportCheckVisitor<'a, 'b> { + fn deref<'c>(&'c self) -> &'c Resolver<'b> { + &*self.resolver + } +} + +impl<'a, 'b> DerefMut> for UnusedImportCheckVisitor<'a, 'b> { + fn deref_mut<'c>(&'c mut self) -> &'c mut Resolver<'b> { + &mut *self.resolver + } +} + +impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> { + // We have information about whether `use` (import) directives are actually used now. + // If an import is not used at all, we signal a lint error. If an import is only used + // for a single namespace, we remove the other namespace from the recorded privacy + // information. That means in privacy.rs, we will only check imports and namespaces + // which are used. 
In particular, this means that if an import could name either a + // public or private item, we will check the correct thing, dependent on how the import + // is used. + fn finalize_import(&mut self, id: ast::NodeId, span: Span) { + debug!("finalizing import uses for {}", + self.session.codemap().span_to_snippet(span)); + + if !self.used_imports.contains(&(id, TypeNS)) && + !self.used_imports.contains(&(id, ValueNS)) { + self.session.add_lint(lint::builtin::UNUSED_IMPORTS, + id, + span, + "unused import".to_string()); + } + + let (v_priv, t_priv) = match self.last_private.get(&id) { + Some(&LastImport { + value_priv: v, + value_used: _, + type_priv: t, + type_used: _ + }) => (v, t), + Some(_) => { + panic!("we should only have LastImport for `use` directives") + } + _ => return, + }; + + let mut v_used = if self.used_imports.contains(&(id, ValueNS)) { + Used + } else { + Unused + }; + let t_used = if self.used_imports.contains(&(id, TypeNS)) { + Used + } else { + Unused + }; + + match (v_priv, t_priv) { + // Since some items may be both in the value _and_ type namespaces (e.g., structs) + // we might have two LastPrivates pointing at the same thing. There is no point + // checking both, so lets not check the value one. + (Some(DependsOn(def_v)), Some(DependsOn(def_t))) if def_v == def_t => v_used = Unused, + _ => {}, + } + + self.last_private.insert(id, LastImport{value_priv: v_priv, + value_used: v_used, + type_priv: t_priv, + type_used: t_used}); + } +} + +impl<'a, 'b, 'v> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b> { + fn visit_view_item(&mut self, vi: &ViewItem) { + // Ignore is_public import statements because there's no way to be sure + // whether they're used or not. Also ignore imports with a dummy span + // because this means that they were generated in some fashion by the + // compiler and we don't need to consider them. + if vi.vis == ast::Public || vi.span == DUMMY_SP { + visit::walk_view_item(self, vi); + return; + } + + match vi.node { + ViewItemExternCrate(_, _, id) => { + if let Some(crate_num) = self.session.cstore.find_extern_mod_stmt_cnum(id) { + if !self.used_crates.contains(&crate_num) { + self.session.add_lint(lint::builtin::UNUSED_EXTERN_CRATES, + id, + vi.span, + "unused extern crate".to_string()); + } + } + }, + ViewItemUse(ref p) => { + match p.node { + ViewPathSimple(_, _, id) => { + self.finalize_import(id, p.span) + } + + ViewPathList(_, ref list, _) => { + for i in list.iter() { + self.finalize_import(i.node.id(), i.span); + } + } + ViewPathGlob(_, id) => { + if !self.used_imports.contains(&(id, TypeNS)) && + !self.used_imports.contains(&(id, ValueNS)) { + self.session + .add_lint(lint::builtin::UNUSED_IMPORTS, + id, + p.span, + "unused import".to_string()); + } + } + } + } + } + + visit::walk_view_item(self, vi); + } +} + +pub fn check_crate(resolver: &mut Resolver, krate: &ast::Crate) { + let mut visitor = UnusedImportCheckVisitor { resolver: resolver }; + visit::walk_crate(&mut visitor, krate); +} diff --git a/src/librustc/middle/resolve.rs b/src/librustc_resolve/lib.rs similarity index 93% rename from src/librustc/middle/resolve.rs rename to src/librustc_resolve/lib.rs index e1e376c537cba..bf9e9294307ef 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc_resolve/lib.rs @@ -8,12 +8,22 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
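Both of the new resolver passes (`check_unused.rs` above and `record_exports.rs` later in this patch) wrap a `&mut Resolver` and rely on `Deref`/`DerefMut` so the wrapper can reach the resolver's fields and methods directly. The patch uses the old parameterized `Deref<Resolver>` trait form; a minimal sketch of the same delegation pattern with today's associated-type form, with an illustrative field in place of the real resolver state:

```rust
use std::collections::HashSet;
use std::ops::{Deref, DerefMut};

struct Resolver {
    used_imports: HashSet<u32>,
}

struct UnusedImportCheckVisitor<'a> {
    resolver: &'a mut Resolver,
}

impl<'a> Deref for UnusedImportCheckVisitor<'a> {
    type Target = Resolver;
    fn deref(&self) -> &Resolver {
        &*self.resolver
    }
}

impl<'a> DerefMut for UnusedImportCheckVisitor<'a> {
    fn deref_mut(&mut self) -> &mut Resolver {
        &mut *self.resolver
    }
}

fn main() {
    let mut resolver = Resolver { used_imports: HashSet::new() };
    let mut visitor = UnusedImportCheckVisitor { resolver: &mut resolver };
    // Resolver's fields are reachable through the visitor without `.resolver`.
    visitor.used_imports.insert(7);
    assert!(visitor.used_imports.contains(&7));
}
```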
-#![allow(non_camel_case_types)] +#![crate_name = "rustc_resolve"] +#![experimental] +#![crate_type = "dylib"] +#![crate_type = "rlib"] +#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "http://www.rust-lang.org/favicon.ico", + html_root_url = "http://doc.rust-lang.org/nightly/")] + +#![feature(globs, phase, slicing_syntax)] +#![feature(rustc_diagnostic_macros)] + +#[phase(plugin, link)] extern crate log; +#[phase(plugin, link)] extern crate syntax; + +extern crate rustc; -pub use self::PrivateDep::*; -pub use self::ImportUse::*; -pub use self::TraitItemKind::*; -pub use self::LastPrivate::*; use self::PatternBindingMode::*; use self::Namespace::*; use self::NamespaceError::*; @@ -36,26 +46,27 @@ use self::ModuleKind::*; use self::TraitReferenceType::*; use self::FallbackChecks::*; -use session::Session; -use lint; -use metadata::csearch; -use metadata::decoder::{DefLike, DlDef, DlField, DlImpl}; -use middle::def::*; -use middle::lang_items::LanguageItems; -use middle::pat_util::pat_bindings; -use middle::subst::{ParamSpace, FnSpace, TypeSpace}; -use middle::ty::{ExplicitSelfCategory, StaticExplicitSelfCategory}; -use middle::ty::{CaptureModeMap, Freevar, FreevarMap}; -use util::nodemap::{NodeMap, NodeSet, DefIdSet, FnvHashMap}; +use rustc::session::Session; +use rustc::lint; +use rustc::metadata::csearch; +use rustc::metadata::decoder::{DefLike, DlDef, DlField, DlImpl}; +use rustc::middle::def::*; +use rustc::middle::lang_items::LanguageItems; +use rustc::middle::pat_util::pat_bindings; +use rustc::middle::privacy::*; +use rustc::middle::subst::{ParamSpace, FnSpace, TypeSpace}; +use rustc::middle::ty::{CaptureModeMap, Freevar, FreevarMap, TraitMap}; +use rustc::util::nodemap::{NodeMap, NodeSet, DefIdSet, FnvHashMap}; +use rustc::util::lev_distance::lev_distance; use syntax::ast::{Arm, BindByRef, BindByValue, BindingMode, Block, Crate, CrateNum}; use syntax::ast::{DeclItem, DefId, Expr, ExprAgain, ExprBreak, ExprField}; use syntax::ast::{ExprClosure, ExprForLoop, ExprLoop, ExprWhile, ExprMethodCall}; use syntax::ast::{ExprPath, ExprStruct, FnDecl}; use syntax::ast::{ForeignItem, ForeignItemFn, ForeignItemStatic, Generics}; -use syntax::ast::{Ident, ImplItem, Item, ItemEnum, ItemFn, ItemForeignMod}; -use syntax::ast::{ItemImpl, ItemMac, ItemMod, ItemStatic, ItemStruct}; -use syntax::ast::{ItemTrait, ItemTy, LOCAL_CRATE, Local, ItemConst}; +use syntax::ast::{Ident, ImplItem, Item, ItemConst, ItemEnum, ItemFn}; +use syntax::ast::{ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic}; +use syntax::ast::{ItemStruct, ItemTrait, ItemTy, Local}; use syntax::ast::{MethodImplItem, Mod, Name, NamedField, NodeId}; use syntax::ast::{Pat, PatEnum, PatIdent, PatLit}; use syntax::ast::{PatRange, PatStruct, Path, PathListIdent, PathListMod}; @@ -80,78 +91,23 @@ use syntax::owned_slice::OwnedSlice; use syntax::visit::{mod, Visitor}; use std::collections::{HashMap, HashSet}; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::cell::{Cell, RefCell}; use std::mem::replace; use std::rc::{Rc, Weak}; use std::uint; -// Definition mapping -pub type DefMap = RefCell>; +mod check_unused; +mod record_exports; #[deriving(Copy)] -struct binding_info { +struct BindingInfo { span: Span, binding_mode: BindingMode, } // Map from the name in a pattern to its binding mode. 
-type BindingMap = HashMap; - -// Trait method resolution -pub type TraitMap = NodeMap >; - -// This is the replacement export map. It maps a module to all of the exports -// within. -pub type ExportMap2 = NodeMap>; - -pub struct Export2 { - pub name: String, // The name of the target. - pub def_id: DefId, // The definition of the target. -} - -// This set contains all exported definitions from external crates. The set does -// not contain any entries from local crates. -pub type ExternalExports = DefIdSet; - -// FIXME: dox -pub type LastPrivateMap = NodeMap; - -#[deriving(Copy, Show)] -pub enum LastPrivate { - LastMod(PrivateDep), - // `use` directives (imports) can refer to two separate definitions in the - // type and value namespaces. We record here the last private node for each - // and whether the import is in fact used for each. - // If the Option fields are None, it means there is no definition - // in that namespace. - LastImport{value_priv: Option, - value_used: ImportUse, - type_priv: Option, - type_used: ImportUse}, -} - -#[deriving(Copy, Show)] -pub enum PrivateDep { - AllPublic, - DependsOn(DefId), -} - -// How an import is used. -#[deriving(Copy, PartialEq, Show)] -pub enum ImportUse { - Unused, // The import is not used. - Used, // The import is used. -} - -impl LastPrivate { - fn or(self, other: LastPrivate) -> LastPrivate { - match (self, other) { - (me, LastMod(AllPublic)) => me, - (_, other) => other, - } - } -} +type BindingMap = HashMap; #[deriving(Copy, PartialEq)] enum PatternBindingMode { @@ -340,25 +296,6 @@ enum ModulePrefixResult { PrefixFound(Rc, uint) } -#[deriving(Clone, Copy, Eq, PartialEq)] -pub enum TraitItemKind { - NonstaticMethodTraitItemKind, - StaticMethodTraitItemKind, - TypeTraitItemKind, -} - -impl TraitItemKind { - pub fn from_explicit_self_category(explicit_self_category: - ExplicitSelfCategory) - -> TraitItemKind { - if explicit_self_category == StaticExplicitSelfCategory { - StaticMethodTraitItemKind - } else { - NonstaticMethodTraitItemKind - } - } -} - #[deriving(Copy, PartialEq)] enum NameSearchType { /// We're doing a name search in order to resolve a `use` directive. @@ -948,7 +885,7 @@ struct Resolver<'a> { freevars: RefCell, freevars_seen: RefCell>, capture_mode_map: CaptureModeMap, - export_map2: ExportMap2, + export_map: ExportMap, trait_map: TraitMap, external_exports: ExternalExports, last_private: LastPrivateMap, @@ -1002,17 +939,6 @@ impl<'a, 'b, 'v> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b> { } -struct UnusedImportCheckVisitor<'a, 'b:'a> { - resolver: &'a mut Resolver<'b> -} - -impl<'a, 'b, 'v> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b> { - fn visit_view_item(&mut self, vi: &ViewItem) { - self.resolver.check_for_item_unused_imports(vi); - visit::walk_view_item(self, vi); - } -} - #[deriving(PartialEq)] enum FallbackChecks { Everything, @@ -1063,7 +989,7 @@ impl<'a> Resolver<'a> { freevars: RefCell::new(NodeMap::new()), freevars_seen: RefCell::new(NodeMap::new()), capture_mode_map: NodeMap::new(), - export_map2: NodeMap::new(), + export_map: NodeMap::new(), trait_map: NodeMap::new(), used_imports: HashSet::new(), used_crates: HashSet::new(), @@ -1073,22 +999,6 @@ impl<'a> Resolver<'a> { emit_errors: true, } } - /// The main name resolution procedure. 
- fn resolve(&mut self, krate: &ast::Crate) { - self.build_reduced_graph(krate); - self.session.abort_if_errors(); - - self.resolve_imports(); - self.session.abort_if_errors(); - - self.record_exports(); - self.session.abort_if_errors(); - - self.resolve_crate(krate); - self.session.abort_if_errors(); - - self.check_for_unused_imports(krate); - } // // Reduced graph building @@ -1209,14 +1119,14 @@ impl<'a> Resolver<'a> { self.resolve_error(sp, format!("duplicate definition of {} `{}`", namespace_error_to_string(duplicate_type), - token::get_name(name)).as_slice()); + token::get_name(name))[]); { let r = child.span_for_namespace(ns); for sp in r.iter() { self.session.span_note(*sp, format!("first definition of {} `{}` here", namespace_error_to_string(duplicate_type), - token::get_name(name)).as_slice()); + token::get_name(name))[]); } } } @@ -2237,7 +2147,7 @@ impl<'a> Resolver<'a> { debug!("(building import directive) building import \ directive: {}::{}", self.names_to_string(module_.imports.borrow().last().unwrap() - .module_path.as_slice()), + .module_path[]), token::get_name(target)); let mut import_resolutions = module_.import_resolutions @@ -2355,10 +2265,10 @@ impl<'a> Resolver<'a> { let msg = format!("unresolved import `{}`{}", self.import_path_to_string( import_directive.module_path - .as_slice(), + [], import_directive.subclass), help); - self.resolve_error(span, msg.as_slice()); + self.resolve_error(span, msg[]); } Indeterminate => break, // Bail out. We'll come around next time. Success(()) => () // Good. Continue. @@ -2388,7 +2298,7 @@ impl<'a> Resolver<'a> { .iter() .map(|seg| seg.identifier.name) .collect(); - self.names_to_string(names.as_slice()) + self.names_to_string(names[]) } fn import_directive_subclass_to_string(&mut self, @@ -2430,7 +2340,7 @@ impl<'a> Resolver<'a> { debug!("(resolving import for module) resolving import `{}::...` in \ `{}`", - self.names_to_string(module_path.as_slice()), + self.names_to_string(module_path[]), self.module_to_string(&*module_)); // First, resolve the module path for the directive, if necessary. 
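Import resolution in the hunks above is iterative: an import that comes back `Indeterminate` is simply retried on a later pass ("We'll come around next time"), and once a pass stops making progress whatever remains is reported as unresolved. The following is only a toy model of that fixed-point loop, with invented data structures, not the resolver's actual algorithm:

```rust
use std::collections::HashSet;

/// One pass: resolve every pending import whose source module is already known.
/// Returns how many imports were resolved in this pass.
fn resolve_pass(known: &mut HashSet<String>, pending: &mut Vec<(String, String)>) -> usize {
    let before = pending.len();
    pending.retain(|(from, name)| {
        if known.contains(from) {
            // Success: the imported name becomes visible too.
            known.insert(name.clone());
            false // drop from the pending list
        } else {
            true // Indeterminate: try again on the next pass
        }
    });
    before - pending.len()
}

fn main() {
    let mut known: HashSet<String> = ["std".to_string()].into_iter().collect();
    // (module the import reads from, name it introduces)
    let mut pending = vec![
        ("a".to_string(), "b".to_string()),
        ("std".to_string(), "a".to_string()),
        ("nowhere".to_string(), "x".to_string()),
    ];

    // Keep making passes while progress is made.
    while resolve_pass(&mut known, &mut pending) > 0 {}

    // Anything still pending is an unresolved import.
    assert!(known.contains("b"));
    assert_eq!(pending, vec![("nowhere".to_string(), "x".to_string())]);
}
```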
@@ -2439,7 +2349,7 @@ impl<'a> Resolver<'a> { Some((self.graph_root.get_module(), LastMod(AllPublic))) } else { match self.resolve_module_path(module_.clone(), - module_path.as_slice(), + module_path[], DontUseLexicalScope, import_directive.span, ImportSearch) { @@ -3031,7 +2941,7 @@ impl<'a> Resolver<'a> { ValueNS => "value", }, token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg[]); } Some(_) | None => {} } @@ -3046,7 +2956,7 @@ impl<'a> Resolver<'a> { if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) { let msg = format!("`{}` is not directly importable", token::get_name(name)); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg[]); } } @@ -3071,7 +2981,7 @@ impl<'a> Resolver<'a> { crate in this module \ (maybe you meant `use {0}::*`?)", token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg[]); } Some(_) | None => {} } @@ -3093,7 +3003,7 @@ impl<'a> Resolver<'a> { let msg = format!("import `{}` conflicts with value \ in this module", token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg[]); if let Some(span) = value.value_span { self.session.span_note(span, "conflicting value here"); @@ -3111,7 +3021,7 @@ impl<'a> Resolver<'a> { let msg = format!("import `{}` conflicts with type in \ this module", token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg[]); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting type here") @@ -3124,7 +3034,7 @@ impl<'a> Resolver<'a> { let msg = format!("inherent implementations \ are only allowed on types \ defined in the current module"); - self.session.span_err(span, msg.as_slice()); + self.session.span_err(span, msg[]); self.session.span_note(import_span, "import from other module here") } @@ -3133,7 +3043,7 @@ impl<'a> Resolver<'a> { let msg = format!("import `{}` conflicts with existing \ submodule", token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg[]); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting module here") @@ -3163,7 +3073,7 @@ impl<'a> Resolver<'a> { .span_err(span, format!("an external crate named `{}` has already \ been imported into this module", - token::get_name(name).get()).as_slice()); + token::get_name(name).get())[]); } } @@ -3182,7 +3092,7 @@ impl<'a> Resolver<'a> { format!("the name `{}` conflicts with an external \ crate that has been imported into this \ module", - token::get_name(name).get()).as_slice()); + token::get_name(name).get())[]); } } @@ -3230,7 +3140,7 @@ impl<'a> Resolver<'a> { let segment_name = token::get_name(name); let module_name = self.module_to_string(&*search_module); let mut span = span; - let msg = if "???" == module_name.as_slice() { + let msg = if "???" 
== module_name[] { span.hi = span.lo + Pos::from_uint(segment_name.get().len()); match search_parent_externals(name, @@ -3343,14 +3253,14 @@ impl<'a> Resolver<'a> { match module_prefix_result { Failed(None) => { let mpath = self.names_to_string(module_path); - let mpath = mpath.as_slice(); + let mpath = mpath[]; match mpath.rfind(':') { Some(idx) => { let msg = format!("Could not find `{}` in `{}`", // idx +- 1 to account for the // colons on either side - mpath.slice_from(idx + 1), - mpath.slice_to(idx - 1)); + mpath[idx + 1..], + mpath[0..idx - 1]); return Failed(Some((span, msg))); }, None => { @@ -3521,7 +3431,7 @@ impl<'a> Resolver<'a> { true) { Failed(Some((span, msg))) => self.resolve_error(span, format!("failed to resolve. {}", - msg)), + msg)[]), Failed(None) => (), // Continue up the search chain. Indeterminate => { // We couldn't see through the higher scope because of an @@ -3776,8 +3686,8 @@ impl<'a> Resolver<'a> { "unresolved import"); } else { let err = format!("unresolved import (maybe you meant `{}::*`?)", - sn.slice(0, sn.len())); - self.resolve_error((*imports)[index].span, err.as_slice()); + sn); + self.resolve_error((*imports)[index].span, err[]); } } @@ -3800,125 +3710,6 @@ impl<'a> Resolver<'a> { } } - // Export recording - // - // This pass simply determines what all "export" keywords refer to and - // writes the results into the export map. - // - // FIXME #4953 This pass will be removed once exports change to per-item. - // Then this operation can simply be performed as part of item (or import) - // processing. - - fn record_exports(&mut self) { - let root_module = self.graph_root.get_module(); - self.record_exports_for_module_subtree(root_module); - } - - fn record_exports_for_module_subtree(&mut self, - module_: Rc) { - // If this isn't a local krate, then bail out. We don't need to record - // exports for nonlocal crates. - - match module_.def_id.get() { - Some(def_id) if def_id.krate == LOCAL_CRATE => { - // OK. Continue. - debug!("(recording exports for module subtree) recording \ - exports for local module `{}`", - self.module_to_string(&*module_)); - } - None => { - // Record exports for the root module. - debug!("(recording exports for module subtree) recording \ - exports for root module `{}`", - self.module_to_string(&*module_)); - } - Some(_) => { - // Bail out. - debug!("(recording exports for module subtree) not recording \ - exports for `{}`", - self.module_to_string(&*module_)); - return; - } - } - - self.record_exports_for_module(&*module_); - self.populate_module_if_necessary(&module_); - - for (_, child_name_bindings) in module_.children.borrow().iter() { - match child_name_bindings.get_module_if_available() { - None => { - // Nothing to do. 
- } - Some(child_module) => { - self.record_exports_for_module_subtree(child_module); - } - } - } - - for (_, child_module) in module_.anonymous_children.borrow().iter() { - self.record_exports_for_module_subtree(child_module.clone()); - } - } - - fn record_exports_for_module(&mut self, module_: &Module) { - let mut exports2 = Vec::new(); - - self.add_exports_for_module(&mut exports2, module_); - match module_.def_id.get() { - Some(def_id) => { - self.export_map2.insert(def_id.node, exports2); - debug!("(computing exports) writing exports for {} (some)", - def_id.node); - } - None => {} - } - } - - fn add_exports_of_namebindings(&mut self, - exports2: &mut Vec , - name: Name, - namebindings: &NameBindings, - ns: Namespace) { - match namebindings.def_for_namespace(ns) { - Some(d) => { - let name = token::get_name(name); - debug!("(computing exports) YES: export '{}' => {}", - name, d.def_id()); - exports2.push(Export2 { - name: name.get().to_string(), - def_id: d.def_id() - }); - } - d_opt => { - debug!("(computing exports) NO: {}", d_opt); - } - } - } - - fn add_exports_for_module(&mut self, - exports2: &mut Vec , - module_: &Module) { - for (name, importresolution) in module_.import_resolutions.borrow().iter() { - if !importresolution.is_public { - continue - } - let xs = [TypeNS, ValueNS]; - for &ns in xs.iter() { - match importresolution.target_for_namespace(ns) { - Some(target) => { - debug!("(computing exports) maybe export '{}'", - token::get_name(*name)); - self.add_exports_of_namebindings(exports2, - *name, - &*target.bindings, - ns) - } - _ => () - } - } - } - } - // AST resolution // // We maintain a list of value ribs and type ribs. @@ -3988,7 +3779,7 @@ impl<'a> Resolver<'a> { match def_like { DlDef(d @ DefUpvar(..)) => { self.session.span_bug(span, - format!("unexpected {} in bindings", d).as_slice()) + format!("unexpected {} in bindings", d)[]) } DlDef(d @ DefLocal(_)) => { let node_id = d.def_id().node; @@ -4204,7 +3995,7 @@ impl<'a> Resolver<'a> { generics, implemented_traits, &**self_type, - impl_items.as_slice()); + impl_items[]); } ItemTrait(_, ref generics, ref unbound, ref bounds, ref trait_items) => { @@ -4289,7 +4080,7 @@ impl<'a> Resolver<'a> { ItemStruct(ref struct_def, ref generics) => { self.resolve_struct(item.id, generics, - struct_def.fields.as_slice()); + struct_def.fields[]); } ItemMod(ref module_) => { @@ -4362,7 +4153,7 @@ impl<'a> Resolver<'a> { parameter in this type \ parameter list", token::get_name( - name)).as_slice()) + name))[]) } seen_bindings.insert(name); @@ -4539,7 +4330,7 @@ impl<'a> Resolver<'a> { }; let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str); - self.resolve_error(trait_reference.path.span, msg.as_slice()); + self.resolve_error(trait_reference.path.span, msg[]); } Some(def) => { match def { @@ -4551,14 +4342,14 @@ impl<'a> Resolver<'a> { self.resolve_error(trait_reference.path.span, format!("`{}` is not a trait", self.path_names_to_string( - &trait_reference.path))); + &trait_reference.path))[]); // If it's a typedef, give a note if let DefTy(..) 
= def { self.session.span_note( trait_reference.path.span, format!("`type` aliases cannot be used for traits") - .as_slice()); + []); } } } @@ -4570,27 +4361,14 @@ impl<'a> Resolver<'a> { for predicate in where_clause.predicates.iter() { match predicate { &ast::WherePredicate::BoundPredicate(ref bound_pred) => { - match self.resolve_identifier(bound_pred.ident, - TypeNS, - true, - bound_pred.span) { - Some((def @ DefTyParam(..), last_private)) => { - self.record_def(bound_pred.id, (def, last_private)); - } - _ => { - self.resolve_error( - bound_pred.span, - format!("undeclared type parameter `{}`", - token::get_ident( - bound_pred.ident)).as_slice()); - } - } + self.resolve_type(&*bound_pred.bounded_ty); for bound in bound_pred.bounds.iter() { - self.resolve_type_parameter_bound(bound_pred.id, bound, + self.resolve_type_parameter_bound(bound_pred.bounded_ty.id, bound, TraitBoundingTypeParameter); } } + &ast::WherePredicate::RegionPredicate(_) => {} &ast::WherePredicate::EqPredicate(ref eq_pred) => { match self.resolve_path(eq_pred.id, &eq_pred.path, TypeNS, true) { Some((def @ DefTyParam(..), last_private)) => { @@ -4768,7 +4546,7 @@ impl<'a> Resolver<'a> { self.resolve_error(span, format!("method `{}` is not a member of trait `{}`", token::get_name(name), - path_str).as_slice()); + path_str)[]); } } } @@ -4809,9 +4587,10 @@ impl<'a> Resolver<'a> { let mut result = HashMap::new(); pat_bindings(&self.def_map, pat, |binding_mode, _id, sp, path1| { let name = mtwt::resolve(path1.node); - result.insert(name, - binding_info {span: sp, - binding_mode: binding_mode}); + result.insert(name, BindingInfo { + span: sp, + binding_mode: binding_mode + }); }); return result; } @@ -4834,7 +4613,7 @@ impl<'a> Resolver<'a> { format!("variable `{}` from pattern #1 is \ not bound in pattern #{}", token::get_name(key), - i + 1).as_slice()); + i + 1)[]); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { @@ -4843,7 +4622,7 @@ impl<'a> Resolver<'a> { format!("variable `{}` is bound with different \ mode in pattern #{} than in pattern #1", token::get_name(key), - i + 1).as_slice()); + i + 1)[]); } } } @@ -4856,7 +4635,7 @@ impl<'a> Resolver<'a> { format!("variable `{}` from pattern {}{} is \ not bound in pattern {}1", token::get_name(key), - "#", i + 1, "#").as_slice()); + "#", i + 1, "#")[]); } } } @@ -4973,7 +4752,7 @@ impl<'a> Resolver<'a> { None => { let msg = format!("use of undeclared type name `{}`", self.path_names_to_string(path)); - self.resolve_error(ty.span, msg.as_slice()); + self.resolve_error(ty.span, msg[]); } } } @@ -5053,7 +4832,7 @@ impl<'a> Resolver<'a> { format!("declaration of `{}` shadows an enum \ variant or unit-like struct in \ scope", - token::get_name(renamed)).as_slice()); + token::get_name(renamed))[]); } FoundConst(ref def, lp) if mode == RefutableMode => { debug!("(resolving pattern) resolving `{}` to \ @@ -5105,7 +4884,7 @@ impl<'a> Resolver<'a> { list", token::get_ident( ident)) - .as_slice()) + []) } else if bindings_list.get(&renamed) == Some(&pat_id) { // Then this is a duplicate variable in the @@ -5114,7 +4893,7 @@ impl<'a> Resolver<'a> { format!("identifier `{}` is bound \ more than once in the same \ pattern", - token::get_ident(ident)).as_slice()); + token::get_ident(ident))[]); } // Else, not bound in the same pattern: do // nothing. 
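The diagnostics in the hunk above come from the or-pattern consistency check: for `p1 | p2 | ...` the resolver builds a `BindingMap` (name to span and binding mode) per alternative and then requires every alternative to bind the same names with the same modes. A small standalone sketch of that comparison, simplified and with invented types, produces the same two classes of error message:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug)]
enum BindingMode {
    ByValue,
    ByRef,
}

type BindingMap = HashMap<String, BindingMode>;

/// Compare pattern #1 against pattern #i: report names missing from one arm
/// and names bound with different modes.
fn check_consistent(first: &BindingMap, other: &BindingMap, i: usize) -> Vec<String> {
    let mut errors = Vec::new();
    for (name, mode_0) in first {
        match other.get(name) {
            None => errors.push(format!(
                "variable `{name}` from pattern #1 is not bound in pattern #{i}"
            )),
            Some(mode_i) if mode_i != mode_0 => errors.push(format!(
                "variable `{name}` is bound with different mode in pattern #{i} than in pattern #1"
            )),
            Some(_) => {}
        }
    }
    for name in other.keys() {
        if !first.contains_key(name) {
            errors.push(format!(
                "variable `{name}` from pattern #{i} is not bound in pattern #1"
            ));
        }
    }
    errors
}

fn main() {
    let first: BindingMap = [("x".to_string(), BindingMode::ByValue)].into();
    let second: BindingMap = [("x".to_string(), BindingMode::ByRef)].into();
    let report = check_consistent(&first, &second, 2);
    assert_eq!(report.len(), 1);
    println!("{}", report[0]);
}
```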
@@ -5143,7 +4922,7 @@ impl<'a> Resolver<'a> { path.segments .last() .unwrap() - .identifier)).as_slice()); + .identifier))[]); } None => { self.resolve_error(path.span, @@ -5152,7 +4931,7 @@ impl<'a> Resolver<'a> { path.segments .last() .unwrap() - .identifier)).as_slice()); + .identifier))[]); } } @@ -5183,7 +4962,7 @@ impl<'a> Resolver<'a> { def: {}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg.as_slice()); + self.resolve_error(path.span, msg[]); } } } @@ -5245,7 +5024,7 @@ impl<'a> Resolver<'a> { match err { Some((span, msg)) => { self.resolve_error(span, format!("failed to resolve: {}", - msg)); + msg)[]); } None => () } @@ -5441,7 +5220,7 @@ impl<'a> Resolver<'a> { let last_private; let module = self.current_module.clone(); match self.resolve_module_path(module, - module_path.as_slice(), + module_path[], UseLexicalScope, path.span, PathSearch) { @@ -5456,7 +5235,7 @@ impl<'a> Resolver<'a> { }; self.resolve_error(span, format!("failed to resolve. {}", - msg.as_slice())); + msg)[]); return None; } Indeterminate => panic!("indeterminate unexpected"), @@ -5499,7 +5278,7 @@ impl<'a> Resolver<'a> { let containing_module; let last_private; match self.resolve_module_path_from_root(root_module, - module_path.as_slice(), + module_path[], 0, path.span, PathSearch, @@ -5509,13 +5288,13 @@ impl<'a> Resolver<'a> { Some((span, msg)) => (span, msg), None => { let msg = format!("Use of undeclared module `::{}`", - self.names_to_string(module_path.as_slice())); + self.names_to_string(module_path[])); (path.span, msg) } }; self.resolve_error(span, format!("failed to resolve. {}", - msg.as_slice())); + msg)[]); return None; } @@ -5556,7 +5335,7 @@ impl<'a> Resolver<'a> { } TypeNS => { let name = ident.name; - self.search_ribs(self.type_ribs.as_slice(), name, span) + self.search_ribs(self.type_ribs[], name, span) } }; @@ -5610,7 +5389,8 @@ impl<'a> Resolver<'a> { Failed(err) => { match err { Some((span, msg)) => - self.resolve_error(span, format!("failed to resolve. {}", msg)), + self.resolve_error(span, format!("failed to resolve. {}", + msg)[]), None => () } @@ -5630,9 +5410,9 @@ impl<'a> Resolver<'a> { rs } - fn resolve_error(&self, span: Span, s: T) { + fn resolve_error(&self, span: Span, s: &str) { if self.emit_errors { - self.session.span_err(span, s.as_slice()); + self.session.span_err(span, s); } } @@ -5667,7 +5447,7 @@ impl<'a> Resolver<'a> { } } else { match this.resolve_module_path(root, - name_path.as_slice(), + name_path[], UseLexicalScope, span, PathSearch) { @@ -5705,7 +5485,7 @@ impl<'a> Resolver<'a> { let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::>(); // Look for a method in the current self type's impl module. 
- match get_module(self, path.span, name_path.as_slice()) { + match get_module(self, path.span, name_path[]) { Some(module) => match module.children.borrow().get(&name) { Some(binding) => { let p_str = self.path_names_to_string(&path); @@ -5761,7 +5541,7 @@ impl<'a> Resolver<'a> { let mut smallest = 0; for (i, other) in maybes.iter().enumerate() { - values[i] = name.lev_distance(other.get()); + values[i] = lev_distance(name, other.get()); if values[i] <= values[smallest] { smallest = i; @@ -5916,7 +5696,7 @@ impl<'a> Resolver<'a> { def: {}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg.as_slice()); + self.resolve_error(path.span, msg[]); } } @@ -5972,13 +5752,13 @@ impl<'a> Resolver<'a> { ExprBreak(Some(label)) | ExprAgain(Some(label)) => { let renamed = mtwt::resolve(label); - match self.search_ribs(self.label_ribs.as_slice(), + match self.search_ribs(self.label_ribs[], renamed, expr.span) { None => { self.resolve_error( expr.span, format!("use of undeclared label `{}`", - token::get_ident(label)).as_slice()) + token::get_ident(label))[]) } Some(DlDef(def @ DefLabel(_))) => { // Since this def is a label, it is never read. @@ -6114,7 +5894,7 @@ impl<'a> Resolver<'a> { then {}", node_id, *entry.get(), - def).as_slice()); + def)[]); }, Vacant(entry) => { entry.set(def); }, } @@ -6130,124 +5910,11 @@ impl<'a> Resolver<'a> { self.resolve_error(pat.span, format!("cannot use `ref` binding mode \ with {}", - descr).as_slice()); + descr)[]); } } } - // - // Unused import checking - // - // Although this is mostly a lint pass, it lives in here because it depends on - // resolve data structures and because it finalises the privacy information for - // `use` directives. - // - - fn check_for_unused_imports(&mut self, krate: &ast::Crate) { - let mut visitor = UnusedImportCheckVisitor{ resolver: self }; - visit::walk_crate(&mut visitor, krate); - } - - fn check_for_item_unused_imports(&mut self, vi: &ViewItem) { - // Ignore is_public import statements because there's no way to be sure - // whether they're used or not. Also ignore imports with a dummy span - // because this means that they were generated in some fashion by the - // compiler and we don't need to consider them. - if vi.vis == Public { return } - if vi.span == DUMMY_SP { return } - - match vi.node { - ViewItemExternCrate(_, _, id) => { - if let Some(crate_num) = self.session.cstore.find_extern_mod_stmt_cnum(id) { - if !self.used_crates.contains(&crate_num) { - self.session.add_lint(lint::builtin::UNUSED_EXTERN_CRATES, - id, - vi.span, - "unused extern crate".to_string()); - } - } - }, - ViewItemUse(ref p) => { - match p.node { - ViewPathSimple(_, _, id) => self.finalize_import(id, p.span), - - ViewPathList(_, ref list, _) => { - for i in list.iter() { - self.finalize_import(i.node.id(), i.span); - } - }, - ViewPathGlob(_, id) => { - if !self.used_imports.contains(&(id, TypeNS)) && - !self.used_imports.contains(&(id, ValueNS)) { - self.session - .add_lint(lint::builtin::UNUSED_IMPORTS, - id, - p.span, - "unused import".to_string()); - } - }, - } - } - } - } - - // We have information about whether `use` (import) directives are actually used now. - // If an import is not used at all, we signal a lint error. If an import is only used - // for a single namespace, we remove the other namespace from the recorded privacy - // information. That means in privacy.rs, we will only check imports and namespaces - // which are used. 
In particular, this means that if an import could name either a - // public or private item, we will check the correct thing, dependent on how the import - // is used. - fn finalize_import(&mut self, id: NodeId, span: Span) { - debug!("finalizing import uses for {}", - self.session.codemap().span_to_snippet(span)); - - if !self.used_imports.contains(&(id, TypeNS)) && - !self.used_imports.contains(&(id, ValueNS)) { - self.session.add_lint(lint::builtin::UNUSED_IMPORTS, - id, - span, - "unused import".to_string()); - } - - let (v_priv, t_priv) = match self.last_private.get(&id) { - Some(&LastImport { - value_priv: v, - value_used: _, - type_priv: t, - type_used: _ - }) => (v, t), - Some(_) => { - panic!("we should only have LastImport for `use` directives") - } - _ => return, - }; - - let mut v_used = if self.used_imports.contains(&(id, ValueNS)) { - Used - } else { - Unused - }; - let t_used = if self.used_imports.contains(&(id, TypeNS)) { - Used - } else { - Unused - }; - - match (v_priv, t_priv) { - // Since some items may be both in the value _and_ type namespaces (e.g., structs) - // we might have two LastPrivates pointing at the same thing. There is no point - // checking both, so lets not check the value one. - (Some(DependsOn(def_v)), Some(DependsOn(def_t))) if def_v == def_t => v_used = Unused, - _ => {}, - } - - self.last_private.insert(id, LastImport{value_priv: v_priv, - value_used: v_used, - type_priv: t_priv, - type_used: t_used}); - } - // // Diagnostics // @@ -6279,8 +5946,7 @@ impl<'a> Resolver<'a> { return "???".to_string(); } self.names_to_string(names.into_iter().rev() - .collect::>() - .as_slice()) + .collect::>()[]) } #[allow(dead_code)] // useful for debugging @@ -6323,7 +5989,7 @@ pub struct CrateMap { pub def_map: DefMap, pub freevars: RefCell, pub capture_mode_map: RefCell, - pub exp_map2: ExportMap2, + pub export_map: ExportMap, pub trait_map: TraitMap, pub external_exports: ExternalExports, pub last_private_map: LastPrivateMap, @@ -6335,12 +6001,26 @@ pub fn resolve_crate(session: &Session, krate: &Crate) -> CrateMap { let mut resolver = Resolver::new(session, krate.span); - resolver.resolve(krate); + + resolver.build_reduced_graph(krate); + session.abort_if_errors(); + + resolver.resolve_imports(); + session.abort_if_errors(); + + record_exports::record(&mut resolver); + session.abort_if_errors(); + + resolver.resolve_crate(krate); + session.abort_if_errors(); + + check_unused::check_crate(&mut resolver, krate); + CrateMap { def_map: resolver.def_map, freevars: resolver.freevars, capture_mode_map: RefCell::new(resolver.capture_mode_map), - exp_map2: resolver.export_map2, + export_map: resolver.export_map, trait_map: resolver.trait_map, external_exports: resolver.external_exports, last_private_map: resolver.last_private, diff --git a/src/librustc_resolve/record_exports.rs b/src/librustc_resolve/record_exports.rs new file mode 100644 index 0000000000000..41882a94b34fd --- /dev/null +++ b/src/librustc_resolve/record_exports.rs @@ -0,0 +1,157 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// Export recording +// +// This pass simply determines what all "export" keywords refer to and +// writes the results into the export map. 
+// +// FIXME #4953 This pass will be removed once exports change to per-item. +// Then this operation can simply be performed as part of item (or import) +// processing. + +use {Module, NameBindings, Resolver}; +use Namespace::{mod, TypeNS, ValueNS}; + +use rustc::middle::def::Export; +use syntax::ast; +use syntax::parse::token; + +use std::rc::Rc; + +struct ExportRecorder<'a, 'b:'a> { + resolver: &'a mut Resolver<'b> +} + +// Deref and DerefMut impls allow treating ExportRecorder as Resolver. +impl<'a, 'b> Deref> for ExportRecorder<'a, 'b> { + fn deref<'c>(&'c self) -> &'c Resolver<'b> { + &*self.resolver + } +} + +impl<'a, 'b> DerefMut> for ExportRecorder<'a, 'b> { + fn deref_mut<'c>(&'c mut self) -> &'c mut Resolver<'b> { + &mut *self.resolver + } +} + +impl<'a, 'b> ExportRecorder<'a, 'b> { + fn record_exports_for_module_subtree(&mut self, + module_: Rc) { + // If this isn't a local krate, then bail out. We don't need to record + // exports for nonlocal crates. + + match module_.def_id.get() { + Some(def_id) if def_id.krate == ast::LOCAL_CRATE => { + // OK. Continue. + debug!("(recording exports for module subtree) recording \ + exports for local module `{}`", + self.module_to_string(&*module_)); + } + None => { + // Record exports for the root module. + debug!("(recording exports for module subtree) recording \ + exports for root module `{}`", + self.module_to_string(&*module_)); + } + Some(_) => { + // Bail out. + debug!("(recording exports for module subtree) not recording \ + exports for `{}`", + self.module_to_string(&*module_)); + return; + } + } + + self.record_exports_for_module(&*module_); + self.populate_module_if_necessary(&module_); + + for (_, child_name_bindings) in module_.children.borrow().iter() { + match child_name_bindings.get_module_if_available() { + None => { + // Nothing to do. 
+ } + Some(child_module) => { + self.record_exports_for_module_subtree(child_module); + } + } + } + + for (_, child_module) in module_.anonymous_children.borrow().iter() { + self.record_exports_for_module_subtree(child_module.clone()); + } + } + + fn record_exports_for_module(&mut self, module_: &Module) { + let mut exports = Vec::new(); + + self.add_exports_for_module(&mut exports, module_); + match module_.def_id.get() { + Some(def_id) => { + self.export_map.insert(def_id.node, exports); + debug!("(computing exports) writing exports for {} (some)", + def_id.node); + } + None => {} + } + } + + fn add_exports_of_namebindings(&mut self, + exports: &mut Vec, + name: ast::Name, + namebindings: &NameBindings, + ns: Namespace) { + match namebindings.def_for_namespace(ns) { + Some(d) => { + debug!("(computing exports) YES: export '{}' => {}", + name, d.def_id()); + exports.push(Export { + name: name, + def_id: d.def_id() + }); + } + d_opt => { + debug!("(computing exports) NO: {}", d_opt); + } + } + } + + fn add_exports_for_module(&mut self, + exports: &mut Vec, + module_: &Module) { + for (name, importresolution) in module_.import_resolutions.borrow().iter() { + if !importresolution.is_public { + continue + } + let xs = [TypeNS, ValueNS]; + for &ns in xs.iter() { + match importresolution.target_for_namespace(ns) { + Some(target) => { + debug!("(computing exports) maybe export '{}'", + token::get_name(*name)); + self.add_exports_of_namebindings(exports, + *name, + &*target.bindings, + ns) + } + _ => () + } + } + } + } +} + +pub fn record(resolver: &mut Resolver) { + let mut recorder = ExportRecorder { resolver: resolver }; + let root_module = recorder.graph_root.get_module(); + recorder.record_exports_for_module_subtree(root_module); +} diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 5617110bfecf7..ec61d3a69537c 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -126,7 +126,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input: &Input) -> String { let validate = |s: String, span: Option| { - creader::validate_crate_name(sess, s.as_slice(), span); + creader::validate_crate_name(sess, s[], span); s }; @@ -144,7 +144,7 @@ pub fn find_crate_name(sess: Option<&Session>, let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); - sess.span_err(attr.span, msg.as_slice()); + sess.span_err(attr.span, msg[]); } } return validate(s.clone(), None); @@ -190,17 +190,17 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>, // to be independent of one another in the crate. 
symbol_hasher.reset(); - symbol_hasher.input_str(link_meta.crate_name.as_slice()); + symbol_hasher.input_str(link_meta.crate_name[]); symbol_hasher.input_str("-"); symbol_hasher.input_str(link_meta.crate_hash.as_str()); for meta in tcx.sess.crate_metadata.borrow().iter() { - symbol_hasher.input_str(meta.as_slice()); + symbol_hasher.input_str(meta[]); } symbol_hasher.input_str("-"); - symbol_hasher.input_str(encoder::encoded_ty(tcx, t).as_slice()); + symbol_hasher.input_str(encoder::encoded_ty(tcx, t)[]); // Prefix with 'h' so that it never blends into adjacent digits let mut hash = String::from_str("h"); - hash.push_str(truncated_hash_result(symbol_hasher).as_slice()); + hash.push_str(truncated_hash_result(symbol_hasher)[]); hash } @@ -249,7 +249,7 @@ pub fn sanitize(s: &str) -> String { let mut tstr = String::new(); for c in c.escape_unicode() { tstr.push(c) } result.push('$'); - result.push_str(tstr.slice_from(1)); + result.push_str(tstr[1..]); } } } @@ -258,7 +258,7 @@ pub fn sanitize(s: &str) -> String { if result.len() > 0u && result.as_bytes()[0] != '_' as u8 && ! (result.as_bytes()[0] as char).is_xid_start() { - return format!("_{}", result.as_slice()); + return format!("_{}", result[]); } return result; @@ -284,12 +284,12 @@ pub fn mangle>(mut path: PI, fn push(n: &mut String, s: &str) { let sani = sanitize(s); - n.push_str(format!("{}{}", sani.len(), sani).as_slice()); + n.push_str(format!("{}{}", sani.len(), sani)[]); } // First, connect each component with pairs. for e in path { - push(&mut n, token::get_name(e.name()).get().as_slice()) + push(&mut n, token::get_name(e.name()).get()[]) } match hash { @@ -327,17 +327,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl hash.push(EXTRA_CHARS.as_bytes()[extra2] as char); hash.push(EXTRA_CHARS.as_bytes()[extra3] as char); - exported_name(path, hash.as_slice()) + exported_name(path, hash[]) } pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, name: &str) -> String { let s = ppaux::ty_to_string(ccx.tcx(), t); - let path = [PathName(token::intern(s.as_slice())), + let path = [PathName(token::intern(s[])), gensym_name(name)]; let hash = get_symbol_hash(ccx, t); - mangle(ast_map::Values(path.iter()), Some(hash.as_slice())) + mangle(ast_map::Values(path.iter()), Some(hash[])) } pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String { @@ -357,7 +357,7 @@ pub fn remove(sess: &Session, path: &Path) { Err(e) => { sess.err(format!("failed to remove {}: {}", path.display(), - e).as_slice()); + e)[]); } } } @@ -372,7 +372,7 @@ pub fn link_binary(sess: &Session, for &crate_type in sess.crate_types.borrow().iter() { if invalid_output_for_target(sess, crate_type) { sess.bug(format!("invalid output type `{}` for target os `{}`", - crate_type, sess.opts.target_triple).as_slice()); + crate_type, sess.opts.target_triple)[]); } let out_file = link_binary_output(sess, trans, crate_type, outputs, crate_name); @@ -437,8 +437,8 @@ pub fn filename_for_input(sess: &Session, out_filename.with_filename(format!("lib{}.rlib", libname)) } config::CrateTypeDylib => { - let (prefix, suffix) = (sess.target.target.options.dll_prefix.as_slice(), - sess.target.target.options.dll_suffix.as_slice()); + let (prefix, suffix) = (sess.target.target.options.dll_prefix[], + sess.target.target.options.dll_suffix[]); out_filename.with_filename(format!("{}{}{}", prefix, libname, @@ -448,7 +448,7 @@ pub fn filename_for_input(sess: &Session, 
out_filename.with_filename(format!("lib{}.a", libname)) } config::CrateTypeExecutable => { - let suffix = sess.target.target.options.exe_suffix.as_slice(); + let suffix = sess.target.target.options.exe_suffix[]; out_filename.with_filename(format!("{}{}", libname, suffix)) } } @@ -477,12 +477,12 @@ fn link_binary_output(sess: &Session, if !out_is_writeable { sess.fatal(format!("output file {} is not writeable -- check its \ permissions.", - out_filename.display()).as_slice()); + out_filename.display())[]); } else if !obj_is_writeable { sess.fatal(format!("object file {} is not writeable -- check its \ permissions.", - obj_filename.display()).as_slice()); + obj_filename.display())[]); } match crate_type { @@ -507,7 +507,7 @@ fn archive_search_paths(sess: &Session) -> Vec { let mut rustpath = filesearch::rust_path(); rustpath.push(sess.target_filesearch().get_lib_path()); let mut search: Vec = sess.opts.addl_lib_search_paths.borrow().clone(); - search.push_all(rustpath.as_slice()); + search.push_all(rustpath[]); return search; } @@ -536,7 +536,7 @@ fn link_rlib<'a>(sess: &'a Session, for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() { match kind { cstore::NativeStatic => { - ab.add_native_library(l.as_slice()).unwrap(); + ab.add_native_library(l[]).unwrap(); } cstore::NativeFramework | cstore::NativeUnknown => {} } @@ -584,12 +584,12 @@ fn link_rlib<'a>(sess: &'a Session, let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir"); let metadata = tmpdir.path().join(METADATA_FILENAME); match fs::File::create(&metadata).write(trans.metadata - .as_slice()) { + []) { Ok(..) => {} Err(e) => { sess.err(format!("failed to write {}: {}", metadata.display(), - e).as_slice()); + e)[]); sess.abort_if_errors(); } } @@ -605,27 +605,27 @@ fn link_rlib<'a>(sess: &'a Session, // extension to it. This is to work around a bug in LLDB that // would cause it to crash if the name of a file in an archive // was exactly 16 bytes. - let bc_filename = obj_filename.with_extension(format!("{}.bc", i).as_slice()); + let bc_filename = obj_filename.with_extension(format!("{}.bc", i)[]); let bc_deflated_filename = obj_filename.with_extension( - format!("{}.bytecode.deflate", i).as_slice()); + format!("{}.bytecode.deflate", i)[]); let bc_data = match fs::File::open(&bc_filename).read_to_end() { Ok(buffer) => buffer, Err(e) => sess.fatal(format!("failed to read bytecode: {}", - e).as_slice()) + e)[]) }; - let bc_data_deflated = match flate::deflate_bytes(bc_data.as_slice()) { + let bc_data_deflated = match flate::deflate_bytes(bc_data[]) { Some(compressed) => compressed, None => sess.fatal(format!("failed to compress bytecode from {}", - bc_filename.display()).as_slice()) + bc_filename.display())[]) }; let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) { Ok(file) => file, Err(e) => { sess.fatal(format!("failed to create compressed bytecode \ - file: {}", e).as_slice()) + file: {}", e)[]) } }; @@ -634,7 +634,7 @@ fn link_rlib<'a>(sess: &'a Session, Ok(()) => {} Err(e) => { sess.err(format!("failed to write compressed bytecode: \ - {}", e).as_slice()); + {}", e)[]); sess.abort_if_errors() } }; @@ -674,7 +674,7 @@ fn write_rlib_bytecode_object_v1(writer: &mut T, try! { writer.write(RLIB_BYTECODE_OBJECT_MAGIC) }; try! { writer.write_le_u32(1) }; try! { writer.write_le_u64(bc_data_deflated_size) }; - try! { writer.write(bc_data_deflated.as_slice()) }; + try! 
{ writer.write(bc_data_deflated[]) }; let number_of_bytes_written_so_far = RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id @@ -725,11 +725,11 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { let p = match *path { Some(ref p) => p.clone(), None => { sess.err(format!("could not find rlib for: `{}`", - name).as_slice()); + name)[]); continue } }; - ab.add_rlib(&p, name.as_slice(), sess.lto()).unwrap(); + ab.add_rlib(&p, name[], sess.lto()).unwrap(); let native_libs = csearch::get_native_libraries(&sess.cstore, cnum); all_native_libs.extend(native_libs.into_iter()); @@ -751,7 +751,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { cstore::NativeUnknown => "library", cstore::NativeFramework => "framework", }; - sess.note(format!("{}: {}", name, *lib).as_slice()); + sess.note(format!("{}: {}", name, *lib)[]); } } @@ -765,12 +765,12 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, // The invocations of cc share some flags across platforms let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.as_slice()); + let mut cmd = Command::new(pname[]); - cmd.args(sess.target.target.options.pre_link_args.as_slice()); + cmd.args(sess.target.target.options.pre_link_args[]); link_args(&mut cmd, sess, dylib, tmpdir.path(), trans, obj_filename, out_filename); - cmd.args(sess.target.target.options.post_link_args.as_slice()); + cmd.args(sess.target.target.options.post_link_args[]); if !sess.target.target.options.no_compiler_rt { cmd.arg("-lcompiler-rt"); } @@ -790,11 +790,11 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status).as_slice()); - sess.note(format!("{}", &cmd).as_slice()); + prog.status)[]); + sess.note(format!("{}", &cmd)[]); let mut output = prog.error.clone(); - output.push_all(prog.output.as_slice()); - sess.note(str::from_utf8(output.as_slice()).unwrap()); + output.push_all(prog.output[]); + sess.note(str::from_utf8(output[]).unwrap()); sess.abort_if_errors(); } debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap()); @@ -803,7 +803,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e).as_slice()); + e)[]); sess.abort_if_errors(); } } @@ -815,7 +815,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, match Command::new("dsymutil").arg(out_filename).output() { Ok(..) 
=> {} Err(e) => { - sess.err(format!("failed to run dsymutil: {}", e).as_slice()); + sess.err(format!("failed to run dsymutil: {}", e)[]); sess.abort_if_errors(); } } @@ -864,7 +864,7 @@ fn link_args(cmd: &mut Command, let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(morestack.as_vec()); - cmd.arg(v.as_slice()); + cmd.arg(v[]); } else { cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]); } @@ -989,7 +989,7 @@ fn link_args(cmd: &mut Command, if sess.opts.cg.rpath { let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec(); v.push_all(out_filename.filename().unwrap()); - cmd.arg(v.as_slice()); + cmd.arg(v[]); } } else { cmd.arg("-shared"); @@ -1001,7 +1001,7 @@ fn link_args(cmd: &mut Command, // addl_lib_search_paths if sess.opts.cg.rpath { let sysroot = sess.sysroot(); - let target_triple = sess.opts.target_triple.as_slice(); + let target_triple = sess.opts.target_triple[]; let get_install_prefix_lib_path = |:| { let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); let tlib = filesearch::relative_target_lib_path(sysroot, target_triple); @@ -1018,14 +1018,14 @@ fn link_args(cmd: &mut Command, get_install_prefix_lib_path: get_install_prefix_lib_path, realpath: ::util::fs::realpath }; - cmd.args(rpath::get_rpath_flags(rpath_config).as_slice()); + cmd.args(rpath::get_rpath_flags(rpath_config)[]); } // Finally add all the linker arguments provided on the command line along // with any #[link_args] attributes found inside the crate let empty = Vec::new(); - cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty).as_slice()); - cmd.args(used_link_args.as_slice()); + cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]); + cmd.args(used_link_args[]); } // # Native library linking @@ -1083,14 +1083,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { } else { // -force_load is the OSX equivalent of --whole-archive, but it // involves passing the full path to the library to link. - let lib = archive::find_library(l.as_slice(), - sess.target.target.options.staticlib_prefix.as_slice(), - sess.target.target.options.staticlib_suffix.as_slice(), - search_path.as_slice(), + let lib = archive::find_library(l[], + sess.target.target.options.staticlib_prefix[], + sess.target.target.options.staticlib_suffix[], + search_path[], &sess.diagnostic().handler); let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(lib.as_vec()); - cmd.arg(v.as_slice()); + cmd.arg(v[]); } } if takes_hints { @@ -1103,7 +1103,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { cmd.arg(format!("-l{}", l)); } cstore::NativeFramework => { - cmd.arg("-framework").arg(l.as_slice()); + cmd.arg("-framework").arg(l[]); } cstore::NativeStatic => unreachable!(), } @@ -1184,9 +1184,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, // against the archive. 
if sess.lto() { let name = cratepath.filename_str().unwrap(); - let name = name.slice(3, name.len() - 5); // chop off lib/.rlib + let name = name[3..name.len() - 5]; // chop off lib/.rlib time(sess.time_passes(), - format!("altering {}.rlib", name).as_slice(), + format!("altering {}.rlib", name)[], (), |()| { let dst = tmpdir.join(cratepath.filename().unwrap()); match fs::copy(&cratepath, &dst) { @@ -1195,7 +1195,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, sess.err(format!("failed to copy {} to {}: {}", cratepath.display(), dst.display(), - e).as_slice()); + e)[]); sess.abort_if_errors(); } } @@ -1207,7 +1207,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, Err(e) => { sess.err(format!("failed to chmod {} when preparing \ for LTO: {}", dst.display(), - e).as_slice()); + e)[]); sess.abort_if_errors(); } } @@ -1221,9 +1221,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, maybe_ar_prog: sess.opts.cg.ar.clone() }; let mut archive = Archive::open(config); - archive.remove_file(format!("{}.o", name).as_slice()); + archive.remove_file(format!("{}.o", name)[]); let files = archive.files(); - if files.iter().any(|s| s.as_slice().ends_with(".o")) { + if files.iter().any(|s| s[].ends_with(".o")) { cmd.arg(dst); } }); @@ -1245,7 +1245,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, let mut v = "-l".as_bytes().to_vec(); v.push_all(unlib(&sess.target, cratepath.filestem().unwrap())); - cmd.arg(v.as_slice()); + cmd.arg(v[]); } } @@ -1287,7 +1287,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) { } cstore::NativeFramework => { cmd.arg("-framework"); - cmd.arg(lib.as_slice()); + cmd.arg(lib[]); } cstore::NativeStatic => { sess.bug("statics shouldn't be propagated"); diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index b9357280d068b..1271330897e73 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -53,21 +53,21 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(p) => p, None => { sess.fatal(format!("could not find rlib for: `{}`", - name).as_slice()); + name)[]); } }; let archive = ArchiveRO::open(&path).expect("wanted an rlib"); let file = path.filename_str().unwrap(); - let file = file.slice(3, file.len() - 5); // chop off lib/.rlib + let file = file[3..file.len() - 5]; // chop off lib/.rlib debug!("reading {}", file); for i in iter::count(0u, 1) { let bc_encoded = time(sess.time_passes(), - format!("check for {}.{}.bytecode.deflate", name, i).as_slice(), + format!("check for {}.{}.bytecode.deflate", name, i)[], (), |_| { archive.read(format!("{}.{}.bytecode.deflate", - file, i).as_slice()) + file, i)[]) }); let bc_encoded = match bc_encoded { Some(data) => data, @@ -75,7 +75,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, if i == 0 { // No bitcode was found at all. sess.fatal(format!("missing compressed bytecode in {}", - path.display()).as_slice()); + path.display())[]); } // No more bitcode files to read. 
break; @@ -98,12 +98,12 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(inflated) => inflated, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name).as_slice()) + name)[]) } } } else { sess.fatal(format!("Unsupported bytecode format version {}", - version).as_slice()) + version)[]) } }) } else { @@ -114,7 +114,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(bc) => bc, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name).as_slice()) + name)[]) } } }) @@ -123,7 +123,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, let ptr = bc_decoded.as_slice().as_ptr(); debug!("linking {}, part {}", name, i); time(sess.time_passes(), - format!("ll link {}.{}", name, i).as_slice(), + format!("ll link {}.{}", name, i)[], (), |()| unsafe { if !llvm::LLVMRustLinkInExternalBitcode(llmod, @@ -131,7 +131,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic().handler(), format!("failed to load bc of `{}`", - name.as_slice())); + name[])); } }); } diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 489d29492c227..5be66d4292097 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -46,13 +46,13 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! { unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { - handler.fatal(msg.as_slice()); + handler.fatal(msg[]); } else { let err = CString::new(cstr, true); let err = String::from_utf8_lossy(err.as_bytes()); handler.fatal(format!("{}: {}", - msg.as_slice(), - err.as_slice()).as_slice()); + msg[], + err[])[]); } } } @@ -103,13 +103,13 @@ impl SharedEmitter { match diag.code { Some(ref code) => { handler.emit_with_code(None, - diag.msg.as_slice(), - code.as_slice(), + diag.msg[], + code[], diag.lvl); }, None => { handler.emit(None, - diag.msg.as_slice(), + diag.msg[], diag.lvl); }, } @@ -164,8 +164,8 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { fn create_target_machine(sess: &Session) -> TargetMachineRef { let reloc_model_arg = match sess.opts.cg.relocation_model { - Some(ref s) => s.as_slice(), - None => sess.target.target.options.relocation_model.as_slice() + Some(ref s) => s[], + None => sess.target.target.options.relocation_model[] }; let reloc_model = match reloc_model_arg { "pic" => llvm::RelocPIC, @@ -176,7 +176,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.err(format!("{} is not a valid relocation mode", sess.opts .cg - .relocation_model).as_slice()); + .relocation_model)[]); sess.abort_if_errors(); unreachable!(); } @@ -197,8 +197,8 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { let fdata_sections = ffunction_sections; let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => s.as_slice(), - None => sess.target.target.options.code_model.as_slice() + Some(ref s) => s[], + None => sess.target.target.options.code_model[] }; let code_model = match code_model_arg { @@ -211,19 +211,19 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.err(format!("{} is not a valid code model", sess.opts .cg - .code_model).as_slice()); + .code_model)[]); sess.abort_if_errors(); unreachable!(); } }; - let triple = sess.target.target.llvm_target.as_slice(); + let triple = sess.target.target.llvm_target[]; let tm = unsafe { triple.with_c_str(|t| { let cpu = match sess.opts.cg.target_cpu { - Some(ref s) => s.as_slice(), - None => 
sess.target.target.options.cpu.as_slice() + Some(ref s) => s[], + None => sess.target.target.options.cpu[] }; cpu.with_c_str(|cpu| { target_feature(sess).with_c_str(|features| { @@ -350,13 +350,13 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef, match cgcx.lto_ctxt { Some((sess, _)) => { sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info { - Some(ei) => sess.span_err(ei.call_site, msg.as_slice()), - None => sess.err(msg.as_slice()), + Some(ei) => sess.span_err(ei.call_site, msg[]), + None => sess.err(msg[]), }); } None => { - cgcx.handler.err(msg.as_slice()); + cgcx.handler.err(msg[]); cgcx.handler.note("build without -C codegen-units for more exact errors"); } } @@ -380,8 +380,8 @@ unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_vo cgcx.handler.note(format!("optimization {} for {} at {}: {}", opt.kind.describe(), pass_name, - if loc.is_empty() { "[unknown]" } else { loc.as_slice() }, - llvm::twine_to_string(opt.message)).as_slice()); + if loc.is_empty() { "[unknown]" } else { loc[] }, + llvm::twine_to_string(opt.message))[]); } } @@ -413,7 +413,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, if config.emit_no_opt_bc { let ext = format!("{}.no-opt.bc", name_extra); - output_names.with_extension(ext.as_slice()).with_c_str(|buf| { + output_names.with_extension(ext[]).with_c_str(|buf| { llvm::LLVMWriteBitcodeToFile(llmod, buf); }) } @@ -445,7 +445,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, pass.with_c_str(|s| { if !llvm::LLVMRustAddPass(mpm, s) { cgcx.handler.warn(format!("unknown pass {}, ignoring", - *pass).as_slice()); + *pass)[]); } }) } @@ -467,7 +467,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, if config.emit_lto_bc { let name = format!("{}.lto.bc", name_extra); - output_names.with_extension(name.as_slice()).with_c_str(|buf| { + output_names.with_extension(name[]).with_c_str(|buf| { llvm::LLVMWriteBitcodeToFile(llmod, buf); }) } @@ -501,7 +501,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, if config.emit_bc { let ext = format!("{}.bc", name_extra); - output_names.with_extension(ext.as_slice()).with_c_str(|buf| { + output_names.with_extension(ext[]).with_c_str(|buf| { llvm::LLVMWriteBitcodeToFile(llmod, buf); }) } @@ -509,7 +509,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, time(config.time_passes, "codegen passes", (), |()| { if config.emit_ir { let ext = format!("{}.ll", name_extra); - output_names.with_extension(ext.as_slice()).with_c_str(|output| { + output_names.with_extension(ext[]).with_c_str(|output| { with_codegen(tm, llmod, config.no_builtins, |cpm| { llvm::LLVMRustPrintModule(cpm, llmod, output); }) @@ -517,14 +517,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, } if config.emit_asm { - let path = output_names.with_extension(format!("{}.s", name_extra).as_slice()); + let path = output_names.with_extension(format!("{}.s", name_extra)[]); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType); }); } if config.emit_obj { - let path = output_names.with_extension(format!("{}.o", name_extra).as_slice()); + let path = output_names.with_extension(format!("{}.o", name_extra)[]); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType); }); @@ -605,6 +605,7 @@ pub fn run_passes(sess: &Session, modules_config.emit_obj = true; metadata_config.emit_obj = true; }, + config::OutputTypeDepInfo 
=> {} } } @@ -637,7 +638,7 @@ pub fn run_passes(sess: &Session, // Process the work items, optionally using worker threads. if sess.opts.cg.codegen_units == 1 { - run_work_singlethreaded(sess, trans.reachable.as_slice(), work_items); + run_work_singlethreaded(sess, trans.reachable[], work_items); } else { run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units); } @@ -665,7 +666,7 @@ pub fn run_passes(sess: &Session, // 2) Multiple codegen units, with `-o some_name`. We have // no good solution for this case, so warn the user. sess.warn(format!("ignoring -o because multiple .{} files were produced", - ext).as_slice()); + ext)[]); } else { // 3) Multiple codegen units, but no `-o some_name`. We // just leave the `foo.0.x` files in place. @@ -698,20 +699,20 @@ pub fn run_passes(sess: &Session, }; let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.as_slice()); + let mut cmd = Command::new(pname[]); - cmd.args(sess.target.target.options.pre_link_args.as_slice()); + cmd.args(sess.target.target.options.pre_link_args[]); cmd.arg("-nostdlib"); for index in range(0, trans.modules.len()) { - cmd.arg(crate_output.with_extension(format!("{}.o", index).as_slice())); + cmd.arg(crate_output.with_extension(format!("{}.o", index)[])); } cmd.arg("-r") .arg("-o") .arg(windows_output_path.as_ref().unwrap_or(output_path)); - cmd.args(sess.target.target.options.post_link_args.as_slice()); + cmd.args(sess.target.target.options.post_link_args[]); if (sess.opts.debugging_opts & config::PRINT_LINK_ARGS) != 0 { println!("{}", &cmd); @@ -724,14 +725,14 @@ pub fn run_passes(sess: &Session, Ok(status) => { if !status.success() { sess.err(format!("linking of {} with `{}` failed", - output_path.display(), cmd).as_slice()); + output_path.display(), cmd)[]); sess.abort_if_errors(); } }, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e).as_slice()); + e)[]); sess.abort_if_errors(); }, } @@ -777,6 +778,7 @@ pub fn run_passes(sess: &Session, link_obj(&crate_output.temp_path(config::OutputTypeObject)); } } + config::OutputTypeDepInfo => {} } } let user_wants_bitcode = user_wants_bitcode; @@ -815,12 +817,12 @@ pub fn run_passes(sess: &Session, for i in range(0, trans.modules.len()) { if modules_config.emit_obj { let ext = format!("{}.o", i); - remove(sess, &crate_output.with_extension(ext.as_slice())); + remove(sess, &crate_output.with_extension(ext[])); } if modules_config.emit_bc && !keep_numbered_bitcode { let ext = format!("{}.bc", i); - remove(sess, &crate_output.with_extension(ext.as_slice())); + remove(sess, &crate_output.with_extension(ext[])); } } @@ -946,7 +948,7 @@ fn run_work_multithreaded(sess: &Session, pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.as_slice()); + let mut cmd = Command::new(pname[]); cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject)) .arg(outputs.temp_path(config::OutputTypeAssembly)); @@ -957,18 +959,18 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status).as_slice()); - sess.note(format!("{}", &cmd).as_slice()); + prog.status)[]); + sess.note(format!("{}", &cmd)[]); let mut note = prog.error.clone(); - note.push_all(prog.output.as_slice()); - sess.note(str::from_utf8(note.as_slice()).unwrap()); + note.push_all(prog.output[]); + sess.note(str::from_utf8(note[]).unwrap()); sess.abort_if_errors(); } }, Err(e) => { 
sess.err(format!("could not exec the linker `{}`: {}", pname, - e).as_slice()); + e)[]); sess.abort_if_errors(); } } @@ -1001,7 +1003,7 @@ unsafe fn configure_llvm(sess: &Session) { if sess.print_llvm_passes() { add("-debug-pass=Structure"); } for arg in sess.opts.cg.llvm_args.iter() { - add((*arg).as_slice()); + add((*arg)[]); } } diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 05b1a86b72b05..784002287b750 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -22,7 +22,7 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(default_type_params, globs, import_shadowing, macro_rules, phase, quote)] +#![feature(default_type_params, globs, macro_rules, phase, quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] #![feature(unboxed_closures)] diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs index 1a4f06663ef3b..0183aa8c2aabb 100644 --- a/src/librustc_trans/save/mod.rs +++ b/src/librustc_trans/save/mod.rs @@ -94,7 +94,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // dump info about all the external crates referenced from this crate self.sess.cstore.iter_crate_data(|n, cmd| { - self.fmt.external_crate_str(krate.span, cmd.name.as_slice(), n); + self.fmt.external_crate_str(krate.span, cmd.name[], n); }); self.fmt.recorder.record("end_external_crates\n"); } @@ -143,7 +143,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.as_slice(), + qualname[], self.cur_scope); } } @@ -161,7 +161,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.as_slice(), + qualname[], self.cur_scope); } } @@ -180,7 +180,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let (ref span, ref qualname) = sub_paths[len-2]; self.fmt.sub_type_ref_str(path.span, *span, - qualname.as_slice()); + qualname[]); // write the other sub-paths if len <= 2 { @@ -190,7 +190,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.as_slice(), + qualname[], self.cur_scope); } } @@ -199,7 +199,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { fn lookup_type_ref(&self, ref_id: NodeId) -> Option { if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) { self.sess.bug(format!("def_map has no key for {} in lookup_type_ref", - ref_id).as_slice()); + ref_id)[]); } let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id]; match def { @@ -212,7 +212,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&ref_id) { self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind", - ref_id).as_slice()); + ref_id)[]); } let def = (*def_map)[ref_id]; match def { @@ -241,7 +241,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { def::DefMethod(..) 
| def::DefPrimTy(_) => { self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {}", - def).as_slice()); + def)[]); }, } } @@ -262,8 +262,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { span_utils.span_for_last_ident(p.span), id, qualname, - path_to_string(p).as_slice(), - typ.as_slice()); + path_to_string(p)[], + typ[]); } self.collected_paths.clear(); } @@ -285,14 +285,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { match item.node { ast::ItemImpl(_, _, _, ref ty, _) => { let mut result = String::from_str("<"); - result.push_str(ty_to_string(&**ty).as_slice()); + result.push_str(ty_to_string(&**ty)[]); match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(method.id)) { Some(def_id) => { result.push_str(" as "); result.push_str( - ty::item_path_str(&self.analysis.ty_cx, def_id).as_slice()); + ty::item_path_str(&self.analysis.ty_cx, def_id)[]); }, None => {} } @@ -302,7 +302,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Container {} for method {} not an impl?", - impl_id.node, method.id).as_slice()); + impl_id.node, method.id)[]); }, } }, @@ -312,7 +312,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { impl_id.node, method.id, self.analysis.ty_cx.map.get(impl_id.node) - ).as_slice()); + )[]); }, }, None => match ty::trait_of_item(&self.analysis.ty_cx, @@ -328,20 +328,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Could not find container {} for method {}", - def_id.node, method.id).as_slice()); + def_id.node, method.id)[]); } } }, None => { self.sess.span_bug(method.span, format!("Could not find container for method {}", - method.id).as_slice()); + method.id)[]); }, }, }; qualname.push_str(get_ident(method.pe_ident()).get()); - let qualname = qualname.as_slice(); + let qualname = qualname[]; // record the decl for this def (if it has one) let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx, @@ -430,13 +430,13 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span), field.node.id, - name.get().as_slice(), - qualname.as_slice(), - typ.as_slice(), + name.get()[], + qualname[], + typ[], scope_id), None => self.sess.span_bug(field.span, format!("Could not find sub-span for field {}", - qualname).as_slice()), + qualname)[]), } }, _ => (), @@ -463,7 +463,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(full_span, Some(*param_ss), param.id, - name.as_slice(), + name[], ""); } self.visit_generics(generics); @@ -480,10 +480,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.fn_str(item.span, sub_span, item.id, - qualname.as_slice(), + qualname[], self.cur_scope); - self.process_formals(&decl.inputs, qualname.as_slice()); + self.process_formals(&decl.inputs, qualname[]); // walk arg and return types for arg in decl.inputs.iter() { @@ -497,7 +497,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // walk the body self.nest(item.id, |v| v.visit_block(&*body)); - self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id); + self.process_generic_params(ty_params, item.span, qualname[], item.id); } fn process_static(&mut self, @@ -519,9 +519,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, get_ident(item.ident).get(), - qualname.as_slice(), - value.as_slice(), - ty_to_string(&*typ).as_slice(), + qualname[], + value[], + ty_to_string(&*typ)[], self.cur_scope); // walk type and init value @@ -542,9 +542,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, get_ident(item.ident).get(), - 
qualname.as_slice(), + qualname[], "", - ty_to_string(&*typ).as_slice(), + ty_to_string(&*typ)[], self.cur_scope); // walk type and init value @@ -568,17 +568,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, ctor_id, - qualname.as_slice(), + qualname[], self.cur_scope, - val.as_slice()); + val[]); // fields for field in def.fields.iter() { - self.process_struct_field_def(field, qualname.as_slice(), item.id); + self.process_struct_field_def(field, qualname[], item.id); self.visit_ty(&*field.node.ty); } - self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id); + self.process_generic_params(ty_params, item.span, qualname[], item.id); } fn process_enum(&mut self, @@ -591,12 +591,12 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span), item.id, - enum_name.as_slice(), + enum_name[], self.cur_scope, - val.as_slice()), + val[]), None => self.sess.span_bug(item.span, format!("Could not find subspan for enum {}", - enum_name).as_slice()), + enum_name)[]), } for variant in enum_definition.variants.iter() { let name = get_ident(variant.node.name); @@ -612,9 +612,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, name, - qualname.as_slice(), - enum_name.as_slice(), - val.as_slice(), + qualname[], + enum_name[], + val[], item.id); for arg in args.iter() { self.visit_ty(&*arg.ty); @@ -630,20 +630,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, ctor_id, - qualname.as_slice(), - enum_name.as_slice(), - val.as_slice(), + qualname[], + enum_name[], + val[], item.id); for field in struct_def.fields.iter() { - self.process_struct_field_def(field, enum_name.as_slice(), variant.node.id); + self.process_struct_field_def(field, enum_name[], variant.node.id); self.visit_ty(&*field.node.ty); } } } } - self.process_generic_params(ty_params, item.span, enum_name.as_slice(), item.id); + self.process_generic_params(ty_params, item.span, enum_name[], item.id); } fn process_impl(&mut self, @@ -703,9 +703,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.trait_str(item.span, sub_span, item.id, - qualname.as_slice(), + qualname[], self.cur_scope, - val.as_slice()); + val[]); // super-traits for super_bound in trait_refs.iter() { @@ -737,7 +737,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } // walk generics and methods - self.process_generic_params(generics, item.span, qualname.as_slice(), item.id); + self.process_generic_params(generics, item.span, qualname[], item.id); for method in methods.iter() { self.visit_trait_item(method) } @@ -755,9 +755,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.mod_str(item.span, sub_span, item.id, - qualname.as_slice(), + qualname[], self.cur_scope, - filename.as_slice()); + filename[]); self.nest(item.id, |v| visit::walk_mod(v, m)); } @@ -773,7 +773,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { if !def_map.contains_key(&ex.id) { self.sess.span_bug(ex.span, format!("def_map has no key for {} in visit_expr", - ex.id).as_slice()); + ex.id)[]); } let def = &(*def_map)[ex.id]; let sub_span = self.span.span_for_last_ident(ex.span); @@ -840,7 +840,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.cur_scope), _ => self.sess.span_bug(ex.span, format!("Unexpected def kind while looking up path in '{}'", - self.span.snippet(ex.span)).as_slice()), + self.span.snippet(ex.span))[]), } // modules or types in the path prefix match *def { @@ -961,7 +961,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { 
self.cur_scope); // walk receiver and args - visit::walk_exprs(self, args.as_slice()); + visit::walk_exprs(self, args[]); } fn process_pat(&mut self, p:&ast::Pat) { @@ -978,7 +978,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(p.span, format!("Could not find struct_def for `{}`", - self.span.snippet(p.span)).as_slice()); + self.span.snippet(p.span))[]); } }; for &Spanned { node: ref field, span } in fields.iter() { @@ -1062,11 +1062,11 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(item.span, sub_span, item.id, - qualname.as_slice(), - value.as_slice()); + qualname[], + value[]); self.visit_ty(&**ty); - self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id); + self.process_generic_params(ty_params, item.span, qualname[], item.id); }, ast::ItemMac(_) => (), _ => visit::walk_item(self, item), @@ -1123,12 +1123,12 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(method_type.span, format!("Could not find trait for method {}", - method_type.id).as_slice()); + method_type.id)[]); }, }; qualname.push_str(get_ident(method_type.ident).get()); - let qualname = qualname.as_slice(); + let qualname = qualname[]; let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn); self.fmt.method_decl_str(method_type.span, @@ -1243,7 +1243,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { id, cnum, name, - s.as_slice(), + s[], self.cur_scope); }, } @@ -1349,8 +1349,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { } let mut id = String::from_str("$"); - id.push_str(ex.id.to_string().as_slice()); - self.process_formals(&decl.inputs, id.as_slice()); + id.push_str(ex.id.to_string()[]); + self.process_formals(&decl.inputs, id[]); // walk arg and return types for arg in decl.inputs.iter() { @@ -1393,7 +1393,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { // process collected paths for &(id, ref p, ref immut, ref_kind) in self.collected_paths.iter() { let value = if *immut { - self.span.snippet(p.span).into_string() + self.span.snippet(p.span).to_string() } else { "".to_string() }; @@ -1402,15 +1402,15 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { if !def_map.contains_key(&id) { self.sess.span_bug(p.span, format!("def_map has no key for {} in visit_arm", - id).as_slice()); + id)[]); } let def = &(*def_map)[id]; match *def { def::DefLocal(id) => self.fmt.variable_str(p.span, sub_span, id, - path_to_string(p).as_slice(), - value.as_slice(), + path_to_string(p)[], + value[], ""), def::DefVariant(_,id,_) => self.fmt.ref_str(ref_kind, p.span, @@ -1462,9 +1462,9 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.variable_str(p.span, sub_span, id, - path_to_string(p).as_slice(), - value.as_slice(), - typ.as_slice()); + path_to_string(p)[], + value[], + typ[]); } self.collected_paths.clear(); @@ -1482,7 +1482,7 @@ pub fn process_crate(sess: &Session, return; } - let cratename = match attr::find_crate_name(krate.attrs.as_slice()) { + let cratename = match attr::find_crate_name(krate.attrs[]) { Some(name) => name.get().to_string(), None => { info!("Could not find crate name, using 'unknown_crate'"); @@ -1503,7 +1503,7 @@ pub fn process_crate(sess: &Session, match fs::mkdir_recursive(&root_path, io::USER_RWX) { Err(e) => sess.err(format!("Could not create directory {}: {}", - root_path.display(), e).as_slice()), + root_path.display(), e)[]), _ => (), } @@ -1520,7 +1520,7 @@ pub fn process_crate(sess: &Session, 
Ok(f) => box f, Err(e) => { let disp = root_path.display(); - sess.fatal(format!("Could not open {}: {}", disp, e).as_slice()); + sess.fatal(format!("Could not open {}: {}", disp, e)[]); } }; root_path.pop(); @@ -1546,7 +1546,7 @@ pub fn process_crate(sess: &Session, cur_scope: 0 }; - visitor.dump_crate_info(cratename.as_slice(), krate); + visitor.dump_crate_info(cratename[], krate); visit::walk_crate(&mut visitor, krate); } diff --git a/src/librustc_trans/save/recorder.rs b/src/librustc_trans/save/recorder.rs index 37d9e5d994073..08670864ade93 100644 --- a/src/librustc_trans/save/recorder.rs +++ b/src/librustc_trans/save/recorder.rs @@ -41,7 +41,7 @@ impl Recorder { assert!(self.dump_spans); let result = format!("span,kind,{},{},text,\"{}\"\n", kind, su.extent_str(span), escape(su.snippet(span))); - self.record(result.as_slice()); + self.record(result[]); } } @@ -158,15 +158,15 @@ impl<'a> FmtStrs<'a> { if values.len() != fields.len() { self.span.sess.span_bug(span, format!( "Mismatch between length of fields for '{}', expected '{}', found '{}'", - kind, fields.len(), values.len()).as_slice()); + kind, fields.len(), values.len())[]); } let values = values.iter().map(|s| { // Never take more than 1020 chars if s.len() > 1020 { - s.slice_to(1020) + s[..1020] } else { - s.as_slice() + s[] } }); @@ -182,7 +182,7 @@ impl<'a> FmtStrs<'a> { } ))); Some(strs.fold(String::new(), |mut s, ss| { - s.push_str(ss.as_slice()); + s.push_str(ss[]); s })) } @@ -196,7 +196,7 @@ impl<'a> FmtStrs<'a> { if needs_span { self.span.sess.span_bug(span, format!( "Called record_without_span for '{}' which does requires a span", - label).as_slice()); + label)[]); } assert!(!dump_spans); @@ -210,9 +210,9 @@ impl<'a> FmtStrs<'a> { }; let mut result = String::from_str(label); - result.push_str(values_str.as_slice()); + result.push_str(values_str[]); result.push_str("\n"); - self.recorder.record(result.as_slice()); + self.recorder.record(result[]); } pub fn record_with_span(&mut self, @@ -235,7 +235,7 @@ impl<'a> FmtStrs<'a> { if !needs_span { self.span.sess.span_bug(span, format!("Called record_with_span for '{}' \ - which does not require a span", label).as_slice()); + which does not require a span", label)[]); } let values_str = match self.make_values_str(label, fields, values, span) { @@ -243,7 +243,7 @@ impl<'a> FmtStrs<'a> { None => return, }; let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str); - self.recorder.record(result.as_slice()); + self.recorder.record(result[]); } pub fn check_and_record(&mut self, @@ -273,7 +273,7 @@ impl<'a> FmtStrs<'a> { // variable def's node id let mut qualname = String::from_str(name); qualname.push_str("$"); - qualname.push_str(id.to_string().as_slice()); + qualname.push_str(id.to_string()[]); self.check_and_record(Variable, span, sub_span, diff --git a/src/librustc_trans/save/span_utils.rs b/src/librustc_trans/save/span_utils.rs index 49e8e0fd34714..a92d3c06e64fb 100644 --- a/src/librustc_trans/save/span_utils.rs +++ b/src/librustc_trans/save/span_utils.rs @@ -218,7 +218,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!("Mis-counted brackets when breaking path? 
Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line).as_slice()); + self.snippet(span), loc.file.name, loc.line)[]); } if result.is_none() && prev.tok.is_ident() && bracket_count == 0 { return self.make_sub_span(span, Some(prev.sp)); @@ -244,7 +244,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!( "Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line).as_slice()); + self.snippet(span), loc.file.name, loc.line)[]); } return result } diff --git a/src/librustc_trans/trans/_match.rs b/src/librustc_trans/trans/_match.rs index 1401f1ad1f551..33fd14a441b8a 100644 --- a/src/librustc_trans/trans/_match.rs +++ b/src/librustc_trans/trans/_match.rs @@ -193,12 +193,11 @@ use llvm::{ValueRef, BasicBlockRef}; use middle::check_match::StaticInliner; use middle::check_match; use middle::const_eval; -use middle::def; +use middle::def::{mod, DefMap}; use middle::expr_use_visitor as euv; use middle::lang_items::StrEqFnLangItem; use middle::mem_categorization as mc; use middle::pat_util::*; -use middle::resolve::DefMap; use trans::adt; use trans::base::*; use trans::build::{AddCase, And, BitCast, Br, CondBr, GEPi, InBoundsGEP, Load}; @@ -428,7 +427,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let _indenter = indenter(); m.iter().filter_map(|br| { - e(br.pats.as_slice()).map(|pats| { + e(br.pats[]).map(|pats| { let this = br.pats[col]; let mut bound_ptrs = br.bound_ptrs.clone(); match this.node { @@ -549,7 +548,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>( param_env: param_env, }; enter_match(bcx, dm, m, col, val, |pats| - check_match::specialize(&mcx, pats.as_slice(), &ctor, col, variant_size) + check_match::specialize(&mcx, pats[], &ctor, col, variant_size) ) } @@ -791,7 +790,7 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>, let did = langcall(cx, None, format!("comparison of `{}`", - cx.ty_to_string(rhs_t)).as_slice(), + cx.ty_to_string(rhs_t))[], StrEqFnLangItem); callee::trans_lang_call(cx, did, &[lhs, rhs], None) } @@ -944,7 +943,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if has_nested_bindings(m, col) { let expanded = expand_nested_bindings(bcx, m, col, val); compile_submatch_continue(bcx, - expanded.as_slice(), + expanded[], vals, chk, col, @@ -1036,8 +1035,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, field_vals.len()) ); let mut vals = field_vals; - vals.push_all(vals_left.as_slice()); - compile_submatch(bcx, pats.as_slice(), vals.as_slice(), chk, has_genuine_default); + vals.push_all(vals_left[]); + compile_submatch(bcx, pats[], vals[], chk, has_genuine_default); return; } _ => () @@ -1190,10 +1189,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val); let mut opt_vals = unpacked; - opt_vals.push_all(vals_left.as_slice()); + opt_vals.push_all(vals_left[]); compile_submatch(opt_cx, - opt_ms.as_slice(), - opt_vals.as_slice(), + opt_ms[], + opt_vals[], branch_chk.as_ref().unwrap_or(chk), has_genuine_default); } @@ -1212,8 +1211,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } _ => { compile_submatch(else_cx, - defaults.as_slice(), - vals_left.as_slice(), + defaults[], + vals_left[], chk, has_genuine_default); } @@ -1334,7 +1333,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, "__llmatch"); trmode = 
TrByCopy(alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident).as_slice())); + bcx.ident(ident)[])); } ast::BindByValue(_) => { // in this case, the final type of the variable will be T, @@ -1342,13 +1341,13 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, // above llmatch = alloca_no_lifetime(bcx, llvariable_ty.ptr_to(), - bcx.ident(ident).as_slice()); + bcx.ident(ident)[]); trmode = TrByMove; } ast::BindByRef(_) => { llmatch = alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident).as_slice()); + bcx.ident(ident)[]); trmode = TrByRef; } }; @@ -1416,7 +1415,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, && arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle) }); - compile_submatch(bcx, matches.as_slice(), &[discr_datum.val], &chk, has_default); + compile_submatch(bcx, matches[], &[discr_datum.val], &chk, has_default); let mut arm_cxs = Vec::new(); for arm_data in arm_datas.iter() { @@ -1430,7 +1429,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, arm_cxs.push(bcx); } - bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.as_slice()); + bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs[]); return bcx; } @@ -1582,7 +1581,7 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>, let var_ty = node_id_type(bcx, p_id); // Allocate memory on stack for the binding. - let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).as_slice()); + let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident)[]); // Subtle: be sure that we *populate* the memory *before* // we schedule the cleanup. @@ -1620,7 +1619,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if bcx.sess().asm_comments() { add_comment(bcx, format!("bind_irrefutable_pat(pat={})", - pat.repr(bcx.tcx())).as_slice()); + pat.repr(bcx.tcx()))[]); } let _indenter = indenter(); diff --git a/src/librustc_trans/trans/adt.rs b/src/librustc_trans/trans/adt.rs index f7edb281b9eda..9794611dd8471 100644 --- a/src/librustc_trans/trans/adt.rs +++ b/src/librustc_trans/trans/adt.rs @@ -156,7 +156,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Repr<'tcx> { match t.sty { ty::ty_tup(ref elems) => { - Univariant(mk_struct(cx, elems.as_slice(), false, t), false) + Univariant(mk_struct(cx, elems[], false, t), false) } ty::ty_struct(def_id, ref substs) => { let fields = ty::lookup_struct_fields(cx.tcx(), def_id); @@ -167,16 +167,16 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); if dtor { ftys.push(ty::mk_bool()); } - Univariant(mk_struct(cx, ftys.as_slice(), packed, t), dtor) + Univariant(mk_struct(cx, ftys[], packed, t), dtor) } ty::ty_unboxed_closure(def_id, _, ref substs) => { let upvars = ty::unboxed_closure_upvars(cx.tcx(), def_id, substs); let upvar_types = upvars.iter().map(|u| u.ty).collect::>(); - Univariant(mk_struct(cx, upvar_types.as_slice(), false, t), false) + Univariant(mk_struct(cx, upvar_types[], false, t), false) } ty::ty_enum(def_id, ref substs) => { let cases = get_cases(cx.tcx(), def_id, substs); - let hint = *ty::lookup_repr_hints(cx.tcx(), def_id).as_slice().get(0) + let hint = *ty::lookup_repr_hints(cx.tcx(), def_id)[].get(0) .unwrap_or(&attr::ReprAny); let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); @@ -186,7 +186,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // (Typechecking will reject discriminant-sizing attrs.) 
assert_eq!(hint, attr::ReprAny); let ftys = if dtor { vec!(ty::mk_bool()) } else { vec!() }; - return Univariant(mk_struct(cx, ftys.as_slice(), false, t), + return Univariant(mk_struct(cx, ftys[], false, t), dtor); } @@ -209,7 +209,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, cx.sess().bug(format!("non-C-like enum {} with specified \ discriminants", ty::item_path_str(cx.tcx(), - def_id)).as_slice()); + def_id))[]); } if cases.len() == 1 { @@ -218,7 +218,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert_eq!(hint, attr::ReprAny); let mut ftys = cases[0].tys.clone(); if dtor { ftys.push(ty::mk_bool()); } - return Univariant(mk_struct(cx, ftys.as_slice(), false, t), + return Univariant(mk_struct(cx, ftys[], false, t), dtor); } @@ -227,7 +227,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let mut discr = 0; while discr < 2 { if cases[1 - discr].is_zerolen(cx, t) { - let st = mk_struct(cx, cases[discr].tys.as_slice(), + let st = mk_struct(cx, cases[discr].tys[], false, t); match cases[discr].find_ptr(cx) { Some(ThinPointer(_)) if st.fields.len() == 1 => { @@ -260,17 +260,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let fields : Vec<_> = cases.iter().map(|c| { let mut ftys = vec!(ty_of_inttype(ity)); - ftys.push_all(c.tys.as_slice()); + ftys.push_all(c.tys[]); if dtor { ftys.push(ty::mk_bool()); } - mk_struct(cx, ftys.as_slice(), false, t) + mk_struct(cx, ftys[], false, t) }).collect(); - ensure_enum_fits_in_address_space(cx, ity, fields.as_slice(), t); + ensure_enum_fits_in_address_space(cx, ity, fields[], t); General(ity, fields, dtor) } _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}", - ty_to_string(cx.tcx(), t)).as_slice()) + ty_to_string(cx.tcx(), t))[]) } } @@ -290,7 +290,7 @@ pub enum PointerField { impl<'tcx> Case<'tcx> { fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool { - mk_struct(cx, self.tys.as_slice(), false, scapegoat).size == 0 + mk_struct(cx, self.tys[], false, scapegoat).size == 0 } fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option { @@ -352,9 +352,9 @@ fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .map(|&ty| type_of::sizing_type_of(cx, ty)).collect() }; - ensure_struct_fits_in_address_space(cx, lltys.as_slice(), packed, scapegoat); + ensure_struct_fits_in_address_space(cx, lltys[], packed, scapegoat); - let llty_rec = Type::struct_(cx, lltys.as_slice(), packed); + let llty_rec = Type::struct_(cx, lltys[], packed); Struct { size: machine::llsize_of_alloc(cx, llty_rec), align: machine::llalign_of_min(cx, llty_rec), @@ -403,7 +403,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp return ity; } attr::ReprExtern => { - attempts = match cx.sess().target.target.arch.as_slice() { + attempts = match cx.sess().target.target.arch[] { // WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32` // appears to be used on Linux and NetBSD, but some systems may use the variant // corresponding to `choose_shortest`. However, we don't run on those yet...? @@ -530,7 +530,7 @@ pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match *r { CEnum(..) | General(..) | RawNullablePointer { .. } => { } Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. 
} => - llty.set_struct_body(struct_llfields(cx, st, false, false).as_slice(), + llty.set_struct_body(struct_llfields(cx, st, false, false)[], st.packed) } } @@ -546,7 +546,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => { match name { None => { - Type::struct_(cx, struct_llfields(cx, st, sizing, dst).as_slice(), + Type::struct_(cx, struct_llfields(cx, st, sizing, dst)[], st.packed) } Some(name) => { assert_eq!(sizing, false); Type::named_struct(cx, name) } @@ -565,7 +565,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // of the size. // // FIXME #10604: this breaks when vector types are present. - let (size, align) = union_size_and_align(sts.as_slice()); + let (size, align) = union_size_and_align(sts[]); let align_s = align as u64; let discr_ty = ll_inttype(cx, ity); let discr_size = machine::llsize_of_alloc(cx, discr_ty); @@ -586,10 +586,10 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Type::array(&discr_ty, align_s / discr_size - 1), pad_ty); match name { - None => Type::struct_(cx, fields.as_slice(), false), + None => Type::struct_(cx, fields[], false), Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(fields.as_slice(), false); + llty.set_struct_body(fields[], false); llty } } @@ -847,7 +847,7 @@ pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, v let val = if needs_cast { let ccx = bcx.ccx(); let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields.as_slice(), st.packed); + let real_ty = Type::struct_(ccx, fields[], st.packed); PointerCast(bcx, val, real_ty.ptr_to()) } else { val @@ -879,14 +879,14 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, for (discr, case) in cases.iter().enumerate() { let mut variant_cx = fcx.new_temp_block( - format!("enum-variant-iter-{}", discr.to_string()).as_slice() + format!("enum-variant-iter-{}", discr.to_string())[] ); let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true); AddCase(llswitch, rhs_val, variant_cx.llbb); let fields = case.fields.iter().map(|&ty| type_of::type_of(bcx.ccx(), ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields.as_slice(), case.packed); + let real_ty = Type::struct_(ccx, fields[], case.packed); let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to()); variant_cx = f(variant_cx, case, variant_value); @@ -961,14 +961,14 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true); let mut f = vec![lldiscr]; f.push_all(vals); - let mut contents = build_const_struct(ccx, case, f.as_slice()); + let mut contents = build_const_struct(ccx, case, f[]); contents.push_all(&[padding(ccx, max_sz - case.size)]); - C_struct(ccx, contents.as_slice(), false) + C_struct(ccx, contents[], false) } Univariant(ref st, _dro) => { assert!(discr == 0); let contents = build_const_struct(ccx, st, vals); - C_struct(ccx, contents.as_slice(), st.packed) + C_struct(ccx, contents[], st.packed) } RawNullablePointer { nndiscr, nnty, .. 
} => { if discr == nndiscr { @@ -982,7 +982,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr if discr == nndiscr { C_struct(ccx, build_const_struct(ccx, nonnull, - vals).as_slice(), + vals)[], false) } else { let vals = nonnull.fields.iter().map(|&ty| { @@ -992,7 +992,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr }).collect::>(); C_struct(ccx, build_const_struct(ccx, nonnull, - vals.as_slice()).as_slice(), + vals[])[], false) } } diff --git a/src/librustc_trans/trans/asm.rs b/src/librustc_trans/trans/asm.rs index e3afe22897e39..b8bee1000824d 100644 --- a/src/librustc_trans/trans/asm.rs +++ b/src/librustc_trans/trans/asm.rs @@ -72,7 +72,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) callee::DontAutorefArg) }) }).collect::>(); - inputs.push_all(ext_inputs.as_slice()); + inputs.push_all(ext_inputs[]); // no failure occurred preparing operands, no need to cleanup fcx.pop_custom_cleanup_scope(temp_scope); @@ -92,18 +92,18 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) if !clobbers.is_empty() { clobbers.push(','); } - clobbers.push_str(more_clobbers.as_slice()); + clobbers.push_str(more_clobbers[]); } // Add the clobbers to our constraints list if clobbers.len() != 0 && constraints.len() != 0 { constraints.push(','); - constraints.push_str(clobbers.as_slice()); + constraints.push_str(clobbers[]); } else { - constraints.push_str(clobbers.as_slice()); + constraints.push_str(clobbers[]); } - debug!("Asm Constraints: {}", constraints.as_slice()); + debug!("Asm Constraints: {}", constraints[]); let num_outputs = outputs.len(); @@ -113,7 +113,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) } else if num_outputs == 1 { output_types[0] } else { - Type::struct_(bcx.ccx(), output_types.as_slice(), false) + Type::struct_(bcx.ccx(), output_types[], false) }; let dialect = match ia.dialect { @@ -126,7 +126,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) InlineAsmCall(bcx, a, c, - inputs.as_slice(), + inputs[], output_type, ia.volatile, ia.alignstack, diff --git a/src/librustc_trans/trans/base.rs b/src/librustc_trans/trans/base.rs index 25fbaa6677684..f49fc7f06c501 100644 --- a/src/librustc_trans/trans/base.rs +++ b/src/librustc_trans/trans/base.rs @@ -38,6 +38,7 @@ use llvm::{BasicBlockRef, Linkage, ValueRef, Vector, get_param}; use llvm; use metadata::{csearch, encoder, loader}; use middle::astencode; +use middle::cfg; use middle::lang_items::{LangItem, ExchangeMallocFnLangItem, StartFnLangItem}; use middle::subst; use middle::weak_lang_items; @@ -248,7 +249,7 @@ fn get_extern_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>, let f = decl_rust_fn(ccx, fn_ty, name); csearch::get_item_attrs(&ccx.sess().cstore, did, |attrs| { - set_llvm_fn_attrs(ccx, attrs.as_slice(), f) + set_llvm_fn_attrs(ccx, attrs[], f) }); ccx.externs().borrow_mut().insert(name.to_string(), f); @@ -281,7 +282,7 @@ pub fn kind_for_unboxed_closure(ccx: &CrateContext, closure_id: ast::DefId) pub fn decl_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>, name: &str) -> ValueRef { let (inputs, output, abi, env) = match fn_ty.sty { - ty::ty_bare_fn(ref f) => { + ty::ty_bare_fn(_, ref f) => { (f.sig.0.inputs.clone(), f.sig.0.output, f.abi, None) } ty::ty_closure(ref f) => { @@ -301,7 +302,7 @@ pub fn decl_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, _ => panic!("expected closure or fn") }; 
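Most of the churn in these hunks is mechanical: `.as_slice()` calls are replaced with the then-new `expr[]` full-range slicing sugar, which borrows a `Vec<T>` as `&[T]` or a `String` as `&str` at the call site. A minimal sketch of the same coercions in present-day Rust, where the sugar is written `&expr[..]` (the helper functions are invented for illustration only):

```rust
// Illustrative only: `takes_slice` and `takes_str` stand in for the many
// callees in this patch that want a borrowed view rather than an owned value.
fn takes_slice(xs: &[i32]) -> usize { xs.len() }
fn takes_str(s: &str) -> usize { s.len() }

fn main() {
    let v = vec![1, 2, 3];
    let name = String::from("rustc");

    // 2014-era spellings: `takes_slice(v.as_slice())`, later `takes_slice(v[])`.
    // The same full-range borrow in current syntax:
    assert_eq!(takes_slice(&v[..]), 3);
    assert_eq!(takes_str(&name[..]), 5);
}
```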
- let llfty = type_of_rust_fn(ccx, env, inputs.as_slice(), output, abi); + let llfty = type_of_rust_fn(ccx, env, inputs[], output, abi); debug!("decl_rust_fn(input count={},type={})", inputs.len(), ccx.tn().type_to_string(llfty)); @@ -368,7 +369,7 @@ fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Err(s) => { bcx.sess().fatal(format!("allocation of `{}` {}", bcx.ty_to_string(info_ty), - s).as_slice()); + s)[]); } } } @@ -509,7 +510,7 @@ pub fn unset_split_stack(f: ValueRef) { // silently mangles such symbols, breaking our linkage model. pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) { if ccx.all_llvm_symbols().borrow().contains(&sym) { - ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).as_slice()); + ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym)[]); } ccx.all_llvm_symbols().borrow_mut().insert(sym); } @@ -541,11 +542,12 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let class_ty = ty::lookup_item_type(tcx, parent_id).ty.subst(tcx, substs); let llty = type_of_dtor(ccx, class_ty); let dtor_ty = ty::mk_ctor_fn(ccx.tcx(), + did, &[glue::get_drop_glue_type(ccx, t)], ty::mk_nil(ccx.tcx())); get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), - name.as_slice(), + name[], llvm::CCallConv, llty, dtor_ty) @@ -795,8 +797,8 @@ pub fn iter_structural_ty<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>, let variant_cx = fcx.new_temp_block( format!("enum-iter-variant-{}", - variant.disr_val.to_string().as_slice()) - .as_slice()); + variant.disr_val.to_string()[]) + []); match adt::trans_case(cx, &*repr, variant.disr_val) { _match::SingleResult(r) => { AddCase(llswitch, r.val, variant_cx.llbb) @@ -821,7 +823,7 @@ pub fn iter_structural_ty<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>, } _ => { cx.sess().unimpl(format!("type in iter_structural_ty: {}", - ty_to_string(cx.tcx(), t)).as_slice()) + ty_to_string(cx.tcx(), t))[]) } } return cx; @@ -903,7 +905,7 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>( } _ => { cx.sess().bug(format!("fail-if-zero on unexpected type: {}", - ty_to_string(cx.tcx(), rhs_t)).as_slice()); + ty_to_string(cx.tcx(), rhs_t))[]); } }; let bcx = with_cond(cx, is_zero, |bcx| { @@ -954,22 +956,22 @@ pub fn trans_external_path<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, did: ast::DefId, t: Ty<'tcx>) -> ValueRef { let name = csearch::get_symbol(&ccx.sess().cstore, did); match t.sty { - ty::ty_bare_fn(ref fn_ty) => { + ty::ty_bare_fn(_, ref fn_ty) => { match ccx.sess().target.target.adjust_abi(fn_ty.abi) { Rust | RustCall => { - get_extern_rust_fn(ccx, t, name.as_slice(), did) + get_extern_rust_fn(ccx, t, name[], did) } RustIntrinsic => { ccx.sess().bug("unexpected intrinsic in trans_external_path") } _ => { foreign::register_foreign_item_fn(ccx, fn_ty.abi, t, - name.as_slice()) + name[]) } } } ty::ty_closure(_) => { - get_extern_rust_fn(ccx, t, name.as_slice(), did) + get_extern_rust_fn(ccx, t, name[], did) } _ => { get_extern_const(ccx, did, t) @@ -1023,7 +1025,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llresult = Invoke(bcx, llfn, - llargs.as_slice(), + llargs[], normal_bcx.llbb, landing_pad, Some(attributes)); @@ -1039,7 +1041,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => debuginfo::clear_source_location(bcx.fcx) }; - let llresult = Call(bcx, llfn, llargs.as_slice(), Some(attributes)); + let llresult = Call(bcx, llfn, llargs[], Some(attributes)); return (llresult, bcx); } } @@ -1156,7 +1158,7 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) { pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: 
ValueRef, align: u32) { let _icx = push_ctxt("call_memcpy"); let ccx = cx.ccx(); - let key = match ccx.sess().target.target.target_word_size.as_slice() { + let key = match ccx.sess().target.target.target_word_size[] { "32" => "llvm.memcpy.p0i8.p0i8.i32", "64" => "llvm.memcpy.p0i8.p0i8.i64", tws => panic!("Unsupported target word size for memcpy: {}", tws), @@ -1203,7 +1205,7 @@ fn memzero<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>) { let llty = type_of::type_of(ccx, ty); - let intrinsic_key = match ccx.sess().target.target.target_word_size.as_slice() { + let intrinsic_key = match ccx.sess().target.target.target_word_size[] { "32" => "llvm.memset.p0i8.i32", "64" => "llvm.memset.p0i8.i64", tws => panic!("Unsupported target word size for memset: {}", tws), @@ -1306,47 +1308,33 @@ pub fn make_return_slot_pointer<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>, } } -struct CheckForNestedReturnsVisitor { +struct FindNestedReturn { found: bool, - in_return: bool } -impl CheckForNestedReturnsVisitor { - fn explicit() -> CheckForNestedReturnsVisitor { - CheckForNestedReturnsVisitor { found: false, in_return: false } - } - fn implicit() -> CheckForNestedReturnsVisitor { - CheckForNestedReturnsVisitor { found: false, in_return: true } +impl FindNestedReturn { + fn new() -> FindNestedReturn { + FindNestedReturn { found: false } } } -impl<'v> Visitor<'v> for CheckForNestedReturnsVisitor { +impl<'v> Visitor<'v> for FindNestedReturn { fn visit_expr(&mut self, e: &ast::Expr) { match e.node { ast::ExprRet(..) => { - if self.in_return { - self.found = true; - } else { - self.in_return = true; - visit::walk_expr(self, e); - self.in_return = false; - } + self.found = true; } _ => visit::walk_expr(self, e) } } } -fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { - match tcx.map.find(id) { +fn build_cfg(tcx: &ty::ctxt, id: ast::NodeId) -> (ast::NodeId, Option) { + let blk = match tcx.map.find(id) { Some(ast_map::NodeItem(i)) => { match i.node { ast::ItemFn(_, _, _, _, ref blk) => { - let mut explicit = CheckForNestedReturnsVisitor::explicit(); - let mut implicit = CheckForNestedReturnsVisitor::implicit(); - visit::walk_item(&mut explicit, &*i); - visit::walk_expr_opt(&mut implicit, &blk.expr); - explicit.found || implicit.found + blk } _ => tcx.sess.bug("unexpected item variant in has_nested_returns") } @@ -1356,11 +1344,7 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { ast::ProvidedMethod(ref m) => { match m.node { ast::MethDecl(_, _, _, _, _, _, ref blk, _) => { - let mut explicit = CheckForNestedReturnsVisitor::explicit(); - let mut implicit = CheckForNestedReturnsVisitor::implicit(); - visit::walk_method_helper(&mut explicit, &**m); - visit::walk_expr_opt(&mut implicit, &blk.expr); - explicit.found || implicit.found + blk } ast::MethMac(_) => tcx.sess.bug("unexpanded macro") } @@ -1380,11 +1364,7 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { ast::MethodImplItem(ref m) => { match m.node { ast::MethDecl(_, _, _, _, _, _, ref blk, _) => { - let mut explicit = CheckForNestedReturnsVisitor::explicit(); - let mut implicit = CheckForNestedReturnsVisitor::implicit(); - visit::walk_method_helper(&mut explicit, &**m); - visit::walk_expr_opt(&mut implicit, &blk.expr); - explicit.found || implicit.found + blk } ast::MethMac(_) => tcx.sess.bug("unexpanded macro") } @@ -1398,24 +1378,58 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { Some(ast_map::NodeExpr(e)) => { match e.node { ast::ExprClosure(_, _, _, ref blk) => { - let mut 
explicit = CheckForNestedReturnsVisitor::explicit(); - let mut implicit = CheckForNestedReturnsVisitor::implicit(); - visit::walk_expr(&mut explicit, e); - visit::walk_expr_opt(&mut implicit, &blk.expr); - explicit.found || implicit.found + blk } _ => tcx.sess.bug("unexpected expr variant in has_nested_returns") } } - - Some(ast_map::NodeVariant(..)) | Some(ast_map::NodeStructCtor(..)) => false, + Some(ast_map::NodeVariant(..)) | + Some(ast_map::NodeStructCtor(..)) => return (ast::DUMMY_NODE_ID, None), // glue, shims, etc - None if id == ast::DUMMY_NODE_ID => false, + None if id == ast::DUMMY_NODE_ID => return (ast::DUMMY_NODE_ID, None), _ => tcx.sess.bug(format!("unexpected variant in has_nested_returns: {}", tcx.map.path_to_string(id)).as_slice()) + }; + + (blk.id, Some(cfg::CFG::new(tcx, &**blk))) +} + +// Checks for the presence of "nested returns" in a function. +// Nested returns are when the inner expression of a return expression +// (the 'expr' in 'return expr') contains a return expression. Only cases +// where the outer return is actually reachable are considered. Implicit +// returns from the end of blocks are considered as well. +// +// This check is needed to handle the case where the inner expression is +// part of a larger expression that may have already partially-filled the +// return slot alloca. This can cause errors related to clean-up due to +// the clobbering of the existing value in the return slot. +fn has_nested_returns(tcx: &ty::ctxt, cfg: &cfg::CFG, blk_id: ast::NodeId) -> bool { + for n in cfg.graph.depth_traverse(cfg.entry) { + match tcx.map.find(n.id) { + Some(ast_map::NodeExpr(ex)) => { + if let ast::ExprRet(Some(ref ret_expr)) = ex.node { + let mut visitor = FindNestedReturn::new(); + visit::walk_expr(&mut visitor, &**ret_expr); + if visitor.found { + return true; + } + } + } + Some(ast_map::NodeBlock(blk)) if blk.id == blk_id => { + let mut visitor = FindNestedReturn::new(); + visit::walk_expr_opt(&mut visitor, &blk.expr); + if visitor.found { + return true; + } + } + _ => {} + } } + + return false; } // NB: must keep 4 fns in sync: @@ -1454,7 +1468,12 @@ pub fn new_fn_ctxt<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, ty::FnDiverging => false }; let debug_context = debuginfo::create_function_debug_context(ccx, id, param_substs, llfndecl); - let nested_returns = has_nested_returns(ccx.tcx(), id); + let (blk_id, cfg) = build_cfg(ccx.tcx(), id); + let nested_returns = if let Some(ref cfg) = cfg { + has_nested_returns(ccx.tcx(), cfg, blk_id) + } else { + false + }; let mut fcx = FunctionContext { llfn: llfndecl, @@ -1473,7 +1492,8 @@ pub fn new_fn_ctxt<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, block_arena: block_arena, ccx: ccx, debug_context: debug_context, - scopes: RefCell::new(Vec::new()) + scopes: RefCell::new(Vec::new()), + cfg: cfg }; if has_env { @@ -1672,7 +1692,7 @@ fn copy_unboxed_closure_args_to_allocas<'blk, 'tcx>( "argtuple", arg_scope_id)); let untupled_arg_types = match monomorphized_arg_types[0].sty { - ty::ty_tup(ref types) => types.as_slice(), + ty::ty_tup(ref types) => types[], _ => { bcx.tcx().sess.span_bug(args[0].pat.span, "first arg to `rust-call` ABI function \ @@ -1860,12 +1880,12 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let arg_datums = if abi != RustCall { create_datums_for_fn_args(&fcx, - monomorphized_arg_types.as_slice()) + monomorphized_arg_types[]) } else { create_datums_for_fn_args_under_call_abi( bcx, arg_scope, - monomorphized_arg_types.as_slice()) + monomorphized_arg_types[]) }; bcx = match 
closure_env.kind { @@ -1873,16 +1893,16 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, copy_args_to_allocas(&fcx, arg_scope, bcx, - decl.inputs.as_slice(), + decl.inputs[], arg_datums) } closure::UnboxedClosure(..) => { copy_unboxed_closure_args_to_allocas( bcx, arg_scope, - decl.inputs.as_slice(), + decl.inputs[], arg_datums, - monomorphized_arg_types.as_slice()) + monomorphized_arg_types[]) } }; @@ -1995,11 +2015,11 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let tcx = ccx.tcx(); let result_ty = match ctor_ty.sty { - ty::ty_bare_fn(ref bft) => bft.sig.0.output.unwrap(), + ty::ty_bare_fn(_, ref bft) => bft.sig.0.output.unwrap(), _ => ccx.sess().bug( format!("trans_enum_variant_constructor: \ unexpected ctor return type {}", - ctor_ty.repr(tcx)).as_slice()) + ctor_ty.repr(tcx))[]) }; // Get location to store the result. If the user does not care about @@ -2022,7 +2042,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, bcx = expr::trans_adt(bcx, result_ty, disr, - fields.as_slice(), + fields[], None, expr::SaveIn(llresult), call_info); @@ -2067,11 +2087,11 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx let ctor_ty = ctor_ty.subst(ccx.tcx(), param_substs); let result_ty = match ctor_ty.sty { - ty::ty_bare_fn(ref bft) => bft.sig.0.output, + ty::ty_bare_fn(_, ref bft) => bft.sig.0.output, _ => ccx.sess().bug( format!("trans_enum_variant_or_tuple_like_struct: \ unexpected ctor return type {}", - ty_to_string(ccx.tcx(), ctor_ty)).as_slice()) + ty_to_string(ccx.tcx(), ctor_ty))[]) }; let arena = TypedArena::new(); @@ -2083,7 +2103,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx let arg_tys = ty::ty_fn_args(ctor_ty); - let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.as_slice()); + let arg_datums = create_datums_for_fn_args(&fcx, arg_tys[]); if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) { let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot"); @@ -2147,7 +2167,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span, lvlsrc, Some(sp), format!("enum variant is more than three times larger \ ({} bytes) than the next largest (ignoring padding)", - largest).as_slice()); + largest)[]); ccx.sess().span_note(enum_def.variants[largest_index].span, "this variant is the largest"); @@ -2265,7 +2285,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { match item.node { ast::ItemFn(ref decl, _fn_style, abi, ref generics, ref body) => { if !generics.is_type_parameterized() { - let trans_everywhere = attr::requests_inline(item.attrs.as_slice()); + let trans_everywhere = attr::requests_inline(item.attrs[]); // Ignore `trans_everywhere` for cross-crate inlined items // (`from_external`). 
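The `build_cfg`/`has_nested_returns` pair introduced above replaces the old two-visitor check: the function's CFG is traversed and only `return` expressions that are actually reachable are counted, including the implicit case where the function's tail expression contains a `return`. A rough sketch, in present-day Rust, of the shape of source the check is looking for (function names invented):

```rust
// Reachable nested return: the operand of the outer `return` itself contains
// a `return`, which is what could clobber a partially written return slot.
fn nested(flag: bool) -> i32 {
    return if flag {
        return 1;
    } else {
        2
    };
}

// Implicit case: the function's tail expression contains a `return`.
fn nested_implicit(flag: bool) -> i32 {
    if flag { return 1; } else { 2 }
}

fn main() {
    assert_eq!(nested(true), 1);
    assert_eq!(nested(false), 2);
    assert_eq!(nested_implicit(false), 2);
}
```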
`trans_item` will be called once for each // compilation unit that references the item, so it will still get @@ -2276,7 +2296,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { foreign::trans_rust_fn_with_foreign_abi(ccx, &**decl, &**body, - item.attrs.as_slice(), + item.attrs[], llfn, &Substs::trans_empty(), item.id, @@ -2288,7 +2308,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { llfn, &Substs::trans_empty(), item.id, - item.attrs.as_slice()); + item.attrs[]); } update_linkage(ccx, llfn, @@ -2305,7 +2325,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { ast::ItemImpl(_, ref generics, _, _, ref impl_items) => { meth::trans_impl(ccx, item.ident, - impl_items.as_slice(), + impl_items[], generics, item.id); } @@ -2331,7 +2351,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { // Do static_assert checking. It can't really be done much earlier // because we need to get the value of the bool out of LLVM - if attr::contains_name(item.attrs.as_slice(), "static_assert") { + if attr::contains_name(item.attrs[], "static_assert") { if m == ast::MutMutable { ccx.sess().span_fatal(expr.span, "cannot have static_assert on a mutable \ @@ -2402,13 +2422,13 @@ fn register_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, node_type: Ty<'tcx>) -> ValueRef { match node_type.sty { - ty::ty_bare_fn(ref f) => { + ty::ty_bare_fn(_, ref f) => { assert!(f.abi == Rust || f.abi == RustCall); } _ => panic!("expected bare rust fn") }; - let llfn = decl_rust_fn(ccx, node_type, sym.as_slice()); + let llfn = decl_rust_fn(ccx, node_type, sym[]); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2419,7 +2439,7 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty< let (fn_sig, abi, has_env) = match fn_ty.sty { ty::ty_closure(ref f) => (f.sig.clone(), f.abi, true), - ty::ty_bare_fn(ref f) => (f.sig.clone(), f.abi, false), + ty::ty_bare_fn(_, ref f) => (f.sig.clone(), f.abi, false), ty::ty_unboxed_closure(closure_did, _, ref substs) => { let unboxed_closures = ccx.tcx().unboxed_closures.borrow(); let ref function_type = (*unboxed_closures)[closure_did] @@ -2448,12 +2468,12 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty< _ => ccx.sess().bug("expected tuple'd inputs") } }, - ty::ty_bare_fn(_) if abi == RustCall => { + ty::ty_bare_fn(..) 
if abi == RustCall => { let mut inputs = vec![fn_sig.0.inputs[0]]; match fn_sig.0.inputs[1].sty { ty::ty_tup(ref t_in) => { - inputs.push_all(t_in.as_slice()); + inputs.push_all(t_in[]); inputs } _ => ccx.sess().bug("expected tuple'd inputs") @@ -2588,7 +2608,7 @@ pub fn register_fn_llvmty(ccx: &CrateContext, llfty: Type) -> ValueRef { debug!("register_fn_llvmty id={} sym={}", node_id, sym); - let llfn = decl_fn(ccx, sym.as_slice(), cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx()))); + let llfn = decl_fn(ccx, sym[], cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx()))); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2640,7 +2660,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext, let (start_fn, args) = if use_start_lang_item { let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) { Ok(id) => id, - Err(s) => { ccx.sess().fatal(s.as_slice()); } + Err(s) => { ccx.sess().fatal(s[]); } }; let start_fn = if start_def_id.krate == ast::LOCAL_CRATE { get_item_val(ccx, start_def_id.node) @@ -2731,7 +2751,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let val = match item { ast_map::NodeItem(i) => { let ty = ty::node_id_to_type(ccx.tcx(), i.id); - let sym = || exported_name(ccx, id, ty, i.attrs.as_slice()); + let sym = || exported_name(ccx, id, ty, i.attrs[]); let v = match i.node { ast::ItemStatic(_, _, ref expr) => { @@ -2754,16 +2774,16 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } else { llvm::LLVMTypeOf(v) }; - if contains_null(sym.as_slice()) { + if contains_null(sym[]) { ccx.sess().fatal( format!("Illegal null byte in export_name \ - value: `{}`", sym).as_slice()); + value: `{}`", sym)[]); } let g = sym.with_c_str(|buf| { llvm::LLVMAddGlobal(ccx.llmod(), llty, buf) }); - if attr::contains_name(i.attrs.as_slice(), + if attr::contains_name(i.attrs[], "thread_local") { llvm::set_thread_local(g, true); } @@ -2788,19 +2808,19 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { sym, i.id) }; - set_llvm_fn_attrs(ccx, i.attrs.as_slice(), llfn); + set_llvm_fn_attrs(ccx, i.attrs[], llfn); llfn } _ => panic!("get_item_val: weird result in table") }; - match attr::first_attr_value_str_by_name(i.attrs.as_slice(), + match attr::first_attr_value_str_by_name(i.attrs[], "link_section") { Some(sect) => { if contains_null(sect.get()) { ccx.sess().fatal(format!("Illegal null byte in link_section value: `{}`", - sect.get()).as_slice()); + sect.get())[]); } unsafe { sect.get().with_c_str(|buf| { @@ -2844,7 +2864,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let abi = ccx.tcx().map.get_foreign_abi(id); let ty = ty::node_id_to_type(ccx.tcx(), ni.id); let name = foreign::link_name(&*ni); - foreign::register_foreign_item_fn(ccx, abi, ty, name.get().as_slice()) + foreign::register_foreign_item_fn(ccx, abi, ty, name.get()[]) } ast::ForeignItemStatic(..) 
=> { foreign::register_static(ccx, &*ni) @@ -2867,7 +2887,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let sym = exported_name(ccx, id, ty, - enm.attrs.as_slice()); + enm.attrs[]); llfn = match enm.node { ast::ItemEnum(_, _) => { @@ -2895,7 +2915,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { id, ty, struct_item.attrs - .as_slice()); + []); let llfn = register_fn(ccx, struct_item.span, sym, ctor_id, ty); set_inline_hint(llfn); @@ -2904,7 +2924,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { ref variant => { ccx.sess().bug(format!("get_item_val(): unexpected variant: {}", - variant).as_slice()) + variant)[]) } }; @@ -2925,10 +2945,10 @@ fn register_method(ccx: &CrateContext, id: ast::NodeId, m: &ast::Method) -> ValueRef { let mty = ty::node_id_to_type(ccx.tcx(), id); - let sym = exported_name(ccx, id, mty, m.attrs.as_slice()); + let sym = exported_name(ccx, id, mty, m.attrs[]); let llfn = register_fn(ccx, m.span, sym, id, mty); - set_llvm_fn_attrs(ccx, m.attrs.as_slice(), llfn); + set_llvm_fn_attrs(ccx, m.attrs[], llfn); llfn } @@ -2938,7 +2958,7 @@ pub fn crate_ctxt_to_encode_parms<'a, 'tcx>(cx: &'a SharedCrateContext<'tcx>, encoder::EncodeParams { diag: cx.sess().diagnostic(), tcx: cx.tcx(), - reexports2: cx.exp_map2(), + reexports: cx.export_map(), item_symbols: cx.item_symbols(), link_meta: cx.link_meta(), cstore: &cx.sess().cstore, @@ -2967,7 +2987,7 @@ pub fn write_metadata(cx: &SharedCrateContext, krate: &ast::Crate) -> Vec { Some(compressed) => compressed, None => cx.sess().fatal("failed to compress metadata"), }.as_slice()); - let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed.as_slice()); + let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed[]); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); let name = format!("rust_metadata_{}_{}", cx.link_meta().crate_name, @@ -3071,7 +3091,7 @@ fn internalize_symbols(cx: &SharedCrateContext, reachable: &HashSet) { pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) -> (ty::ctxt<'tcx>, CrateTranslation) { - let ty::CrateAnalysis { ty_cx: tcx, exp_map2, reachable, name, .. } = analysis; + let ty::CrateAnalysis { ty_cx: tcx, export_map, reachable, name, .. } = analysis; let krate = tcx.map.krate(); // Before we touch LLVM, make sure that multithreading is enabled. 
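The other recurring edit in base.rs threads an extra slot (an optional `DefId` naming the originating fn item) through `ty_bare_fn`, so existing matches gain a leading `_` or widen to `(..)`. In miniature, with invented stand-in types rather than the real `ty::sty` machinery:

```rust
// Made-up stand-ins for the real compiler types, to show the pattern change.
type DefId = u32;

enum Sty {
    // Previously `BareFn(&'static str)`; it now also records which fn item
    // (if any) the type came from, so old matches need an extra `_` slot.
    BareFn(Option<DefId>, &'static str),
    Closure(&'static str),
}

fn signature(t: &Sty) -> &'static str {
    match *t {
        Sty::BareFn(_, sig) => sig, // was `Sty::BareFn(sig) => sig`
        Sty::Closure(sig) => sig,
    }
}

fn main() {
    let t = Sty::BareFn(Some(7), "fn(i32) -> i32");
    assert_eq!(signature(&t), "fn(i32) -> i32");
    assert_eq!(signature(&Sty::Closure("fn()")), "fn()");
}
```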
@@ -3095,10 +3115,10 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) let link_meta = link::build_link_meta(&tcx.sess, krate, name); let codegen_units = tcx.sess.opts.cg.codegen_units; - let shared_ccx = SharedCrateContext::new(link_meta.crate_name.as_slice(), + let shared_ccx = SharedCrateContext::new(link_meta.crate_name[], codegen_units, tcx, - exp_map2, + export_map, Sha256::new(), link_meta.clone(), reachable); @@ -3197,7 +3217,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) llmod: shared_ccx.metadata_llmod(), }; let formats = shared_ccx.tcx().dependency_formats.borrow().clone(); - let no_builtins = attr::contains_name(krate.attrs.as_slice(), "no_builtins"); + let no_builtins = attr::contains_name(krate.attrs[], "no_builtins"); let translation = CrateTranslation { modules: modules, diff --git a/src/librustc_trans/trans/basic_block.rs b/src/librustc_trans/trans/basic_block.rs index 476f5e2d618fe..ab25343ff5fe0 100644 --- a/src/librustc_trans/trans/basic_block.rs +++ b/src/librustc_trans/trans/basic_block.rs @@ -37,7 +37,10 @@ impl BasicBlock { pub fn pred_iter(self) -> Preds { fn is_a_terminator_inst(user: &Value) -> bool { user.is_a_terminator_inst() } + let is_a_terminator_inst: fn(&Value) -> bool = is_a_terminator_inst; + fn get_parent(user: Value) -> BasicBlock { user.get_parent().unwrap() } + let get_parent: fn(Value) -> BasicBlock = get_parent; self.as_value().user_iter() .filter(is_a_terminator_inst) diff --git a/src/librustc_trans/trans/builder.rs b/src/librustc_trans/trans/builder.rs index cf940b1384671..1b9c9d221b909 100644 --- a/src/librustc_trans/trans/builder.rs +++ b/src/librustc_trans/trans/builder.rs @@ -555,7 +555,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } else { let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::>(); self.count_insn("gepi"); - self.inbounds_gep(base, v.as_slice()) + self.inbounds_gep(base, v[]) } } @@ -763,8 +763,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", s.as_slice()); - self.add_comment(s.as_slice()); + debug!("{}", s[]); + self.add_comment(s[]); } } @@ -801,7 +801,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }).collect::>(); debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output)); - let fty = Type::func(argtys.as_slice(), &output); + let fty = Type::func(argtys[], &output); unsafe { let v = llvm::LLVMInlineAsm( fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint); diff --git a/src/librustc_trans/trans/cabi.rs b/src/librustc_trans/trans/cabi.rs index ad2a6db1222c2..9ea158fbe2101 100644 --- a/src/librustc_trans/trans/cabi.rs +++ b/src/librustc_trans/trans/cabi.rs @@ -107,7 +107,7 @@ pub fn compute_abi_info(ccx: &CrateContext, atys: &[Type], rty: Type, ret_def: bool) -> FnType { - match ccx.sess().target.target.arch.as_slice() { + match ccx.sess().target.target.arch[] { "x86" => cabi_x86::compute_abi_info(ccx, atys, rty, ret_def), "x86_64" => if ccx.sess().target.target.options.is_like_windows { cabi_x86_win64::compute_abi_info(ccx, atys, rty, ret_def) @@ -117,6 +117,6 @@ pub fn compute_abi_info(ccx: &CrateContext, "arm" => cabi_arm::compute_abi_info(ccx, atys, rty, ret_def), "mips" => cabi_mips::compute_abi_info(ccx, atys, rty, ret_def), a => ccx.sess().fatal((format!("unrecognized arch \"{}\" in target specification", a)) - .as_slice()), + []), } } diff --git a/src/librustc_trans/trans/callee.rs b/src/librustc_trans/trans/callee.rs index 3376479b7a42d..7f22faf050da0 100644 --- 
a/src/librustc_trans/trans/callee.rs +++ b/src/librustc_trans/trans/callee.rs @@ -21,7 +21,8 @@ pub use self::CallArgs::*; use arena::TypedArena; use back::{abi,link}; use session; -use llvm::{ValueRef, get_param}; +use llvm::{ValueRef}; +use llvm::get_param; use llvm; use metadata::csearch; use middle::def; @@ -122,7 +123,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) expr.span, format!("type of callee is neither bare-fn nor closure: \ {}", - bcx.ty_to_string(datum.ty)).as_slice()); + bcx.ty_to_string(datum.ty))[]); } } } @@ -158,7 +159,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) } } def::DefFn(did, _) if match expr_ty.sty { - ty::ty_bare_fn(ref f) => f.abi == synabi::RustIntrinsic, + ty::ty_bare_fn(_, ref f) => f.abi == synabi::RustIntrinsic, _ => false } => { let substs = node_id_substs(bcx, ExprId(ref_expr.id)); @@ -208,7 +209,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) bcx.tcx().sess.span_bug( ref_expr.span, format!("cannot translate def {} \ - to a callable thing!", def).as_slice()); + to a callable thing!", def)[]); } } } @@ -275,24 +276,26 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( // Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`, // which is the fn pointer, and `args`, which is the arguments tuple. - let (input_tys, output_ty) = + let (opt_def_id, input_tys, output_ty) = match bare_fn_ty.sty { - ty::ty_bare_fn(ty::BareFnTy { unsafety: ast::Unsafety::Normal, + ty::ty_bare_fn(opt_def_id, + ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: synabi::Rust, sig: ty::Binder(ty::FnSig { inputs: ref input_tys, output: output_ty, variadic: false })}) => { - (input_tys, output_ty) + (opt_def_id, input_tys, output_ty) } _ => { tcx.sess.bug(format!("trans_fn_pointer_shim invoked on invalid type: {}", - bare_fn_ty.repr(tcx)).as_slice()); + bare_fn_ty.repr(tcx))[]); } }; let tuple_input_ty = ty::mk_tup(tcx, input_tys.to_vec()); let tuple_fn_ty = ty::mk_bare_fn(tcx, + opt_def_id, ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: synabi::RustCall, sig: ty::Binder(ty::FnSig { @@ -310,7 +313,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( let llfn = decl_internal_rust_fn(ccx, tuple_fn_ty, - function_name.as_slice()); + function_name[]); // let block_arena = TypedArena::new(); @@ -345,7 +348,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( None, bare_fn_ty, |bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) }, - ArgVals(llargs.as_slice()), + ArgVals(llargs[]), dest).bcx; finish_fn(&fcx, bcx, output_ty); @@ -487,7 +490,7 @@ pub fn trans_fn_ref_with_substs<'blk, 'tcx>( let opt_ref_id = match node { ExprId(id) => if id != 0 { Some(id) } else { None }, - MethodCall(_) => None, + MethodCallKey(_) => None, }; let (val, must_cast) = @@ -498,7 +501,7 @@ pub fn trans_fn_ref_with_substs<'blk, 'tcx>( // are subst'd) let ref_ty = match node { ExprId(id) => node_id_type(bcx, id), - MethodCall(method_call) => { + MethodCallKey(method_call) => { let t = (*bcx.tcx().method_map.borrow())[method_call].ty; monomorphize_type(bcx, t) } @@ -655,7 +658,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, let mut bcx = callee.bcx; let (abi, ret_ty) = match callee_ty.sty { - ty::ty_bare_fn(ref f) => (f.abi, f.sig.0.output), + ty::ty_bare_fn(_, ref f) => (f.abi, f.sig.0.output), ty::ty_closure(ref f) => (f.abi, f.sig.0.output), _ => panic!("expected bare rust fn or closure in trans_call_inner") }; @@ -813,7 +816,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, bcx = 
foreign::trans_native_call(bcx, callee_ty, llfn, opt_llretslot.unwrap(), - llargs.as_slice(), arg_tys); + llargs[], arg_tys); } fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope); diff --git a/src/librustc_trans/trans/cleanup.rs b/src/librustc_trans/trans/cleanup.rs index fb2c432ef5cf4..c1bb21c496adf 100644 --- a/src/librustc_trans/trans/cleanup.rs +++ b/src/librustc_trans/trans/cleanup.rs @@ -404,7 +404,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { self.ccx.sess().bug( format!("no cleanup scope {} found", - self.ccx.tcx().map.node_to_string(cleanup_scope)).as_slice()); + self.ccx.tcx().map.node_to_string(cleanup_scope))[]); } /// Schedules a cleanup to occur in the top-most scope, which must be a temporary scope. @@ -586,7 +586,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx LoopExit(id, _) => { self.ccx.sess().bug(format!( "cannot exit from scope {}, \ - not in scope", id).as_slice()); + not in scope", id)[]); } } } @@ -655,7 +655,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx let name = scope.block_name("clean"); debug!("generating cleanups for {}", name); let bcx_in = self.new_block(label.is_unwind(), - name.as_slice(), + name[], None); let mut bcx_out = bcx_in; for cleanup in scope.cleanups.iter().rev() { @@ -702,7 +702,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx Some(llbb) => { return llbb; } None => { let name = last_scope.block_name("unwind"); - pad_bcx = self.new_block(true, name.as_slice(), None); + pad_bcx = self.new_block(true, name[], None); last_scope.cached_landing_pad = Some(pad_bcx.llbb); } } @@ -1020,7 +1020,7 @@ pub fn temporary_scope(tcx: &ty::ctxt, } None => { tcx.sess.bug(format!("no temporary scope available for expr {}", - id).as_slice()) + id)[]) } } } diff --git a/src/librustc_trans/trans/closure.rs b/src/librustc_trans/trans/closure.rs index d5d954f5a907b..0ae9de8c89183 100644 --- a/src/librustc_trans/trans/closure.rs +++ b/src/librustc_trans/trans/closure.rs @@ -13,7 +13,6 @@ pub use self::ClosureKind::*; use back::abi; use back::link::mangle_internal_name_by_path_and_seq; use llvm::ValueRef; -use middle::def; use middle::mem_categorization::Typer; use trans::adt; use trans::base::*; @@ -177,7 +176,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = ccx.tcx(); // compute the type of the closure - let cdata_ty = mk_closure_tys(tcx, bound_values.as_slice()); + let cdata_ty = mk_closure_tys(tcx, bound_values[]); // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a // tuple. 
This could be a ptr in uniq or a box or on stack, @@ -206,7 +205,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if ccx.sess().asm_comments() { add_comment(bcx, format!("Copy {} into closure", - bv.to_string(ccx)).as_slice()); + bv.to_string(ccx))[]); } let bound_data = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_BODY, i]); @@ -444,7 +443,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let s = tcx.map.with_path(id, |path| { mangle_internal_name_by_path_and_seq(path, "closure") }); - let llfn = decl_internal_rust_fn(ccx, fty, s.as_slice()); + let llfn = decl_internal_rust_fn(ccx, fty, s[]); // set an inline hint for all closures set_inline_hint(llfn); @@ -468,7 +467,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, &[], ty::ty_fn_ret(fty), ty::ty_fn_abi(fty), - ClosureEnv::new(freevars.as_slice(), + ClosureEnv::new(freevars[], BoxedClosure(cdata_ty, store))); fill_fn_pair(bcx, dest_addr, llfn, llbox); bcx @@ -514,7 +513,7 @@ pub fn get_or_create_declaration_if_unboxed_closure<'blk, 'tcx>(bcx: Block<'blk, mangle_internal_name_by_path_and_seq(path, "unboxed_closure") }); - let llfn = decl_internal_rust_fn(ccx, function_type, symbol.as_slice()); + let llfn = decl_internal_rust_fn(ccx, function_type, symbol[]); // set an inline hint for all closures set_inline_hint(llfn); @@ -563,7 +562,7 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( &[], ty::ty_fn_ret(function_type), ty::ty_fn_abi(function_type), - ClosureEnv::new(freevars.as_slice(), + ClosureEnv::new(freevars[], UnboxedClosure(freevar_mode))); // Don't hoist this to the top of the function. It's perfectly legitimate @@ -603,21 +602,10 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, closure_ty: Ty<'tcx>, - def: def::Def, + def_id: ast::DefId, fn_ptr: ValueRef, is_local: bool) -> ValueRef { - let def_id = match def { - def::DefFn(did, _) | def::DefStaticMethod(did, _) | - def::DefVariant(_, did, _) | def::DefStruct(did) => did, - _ => { - ccx.sess().bug(format!("get_wrapper_for_bare_fn: \ - expected a statically resolved fn, got \ - {}", - def).as_slice()); - } - }; - match ccx.closure_bare_wrapper_cache().borrow().get(&fn_ptr) { Some(&llval) => return llval, None => {} @@ -632,7 +620,7 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, _ => { ccx.sess().bug(format!("get_wrapper_for_bare_fn: \ expected a closure ty, got {}", - closure_ty.repr(tcx)).as_slice()); + closure_ty.repr(tcx))[]); } }; @@ -640,9 +628,9 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, mangle_internal_name_by_path_and_seq(path, "as_closure") }); let llfn = if is_local { - decl_internal_rust_fn(ccx, closure_ty, name.as_slice()) + decl_internal_rust_fn(ccx, closure_ty, name[]) } else { - decl_rust_fn(ccx, closure_ty, name.as_slice()) + decl_rust_fn(ccx, closure_ty, name[]) }; ccx.closure_bare_wrapper_cache().borrow_mut().insert(fn_ptr, llfn); @@ -663,7 +651,7 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let args = create_datums_for_fn_args(&fcx, ty::ty_fn_args(closure_ty) - .as_slice()); + []); let mut llargs = Vec::new(); match fcx.llretslotptr.get() { Some(llretptr) => { @@ -697,11 +685,11 @@ pub fn get_wrapper_for_bare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, pub fn make_closure_from_bare_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, closure_ty: Ty<'tcx>, - def: def::Def, + def_id: ast::DefId, fn_ptr: ValueRef) -> DatumBlock<'blk, 'tcx, Expr> { let scratch = rvalue_scratch_datum(bcx, closure_ty, 
"__adjust"); - let wrapper = get_wrapper_for_bare_fn(bcx.ccx(), closure_ty, def, fn_ptr, true); + let wrapper = get_wrapper_for_bare_fn(bcx.ccx(), closure_ty, def_id, fn_ptr, true); fill_fn_pair(bcx, scratch.val, wrapper, C_null(Type::i8p(bcx.ccx()))); DatumBlock::new(bcx, scratch.to_expr_datum()) diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs index 4dd4e27c9c0d4..9a3e39ff10b31 100644 --- a/src/librustc_trans/trans/common.rs +++ b/src/librustc_trans/trans/common.rs @@ -18,6 +18,7 @@ use session::Session; use llvm; use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef}; use llvm::{True, False, Bool}; +use middle::cfg; use middle::def; use middle::infer; use middle::lang_items::LangItem; @@ -116,7 +117,7 @@ pub fn gensym_name(name: &str) -> PathElem { let num = token::gensym(name).uint(); // use one colon which will get translated to a period by the mangler, and // we're guaranteed that `num` is globally unique for this crate. - PathName(token::gensym(format!("{}:{}", name, num).as_slice())) + PathName(token::gensym(format!("{}:{}", name, num)[])) } #[deriving(Copy)] @@ -262,6 +263,8 @@ pub struct FunctionContext<'a, 'tcx: 'a> { // Cleanup scopes. pub scopes: RefCell>>, + + pub cfg: Option, } impl<'a, 'tcx> FunctionContext<'a, 'tcx> { @@ -433,7 +436,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> { Some(v) => v.clone(), None => { self.tcx().sess.bug(format!( - "no def associated with node id {}", nid).as_slice()); + "no def associated with node id {}", nid)[]); } } } @@ -814,7 +817,7 @@ pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, span, format!("Encountered error `{}` selecting `{}` during trans", e.repr(tcx), - trait_ref.repr(tcx)).as_slice()) + trait_ref.repr(tcx))[]) } }; @@ -841,7 +844,7 @@ pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, span, format!("Encountered errors `{}` fulfilling `{}` during trans", errors.repr(tcx), - trait_ref.repr(tcx)).as_slice()); + trait_ref.repr(tcx))[]); } } } @@ -867,7 +870,7 @@ pub enum ExprOrMethodCall { ExprId(ast::NodeId), // Type parameters for a method call like `a.foo::()` - MethodCall(ty::MethodCall) + MethodCallKey(ty::MethodCall) } pub fn node_id_substs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, @@ -879,7 +882,7 @@ pub fn node_id_substs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ExprId(id) => { ty::node_id_item_substs(tcx, id).substs } - MethodCall(method_call) => { + MethodCallKey(method_call) => { (*tcx.method_map.borrow())[method_call].substs.clone() } }; @@ -889,7 +892,7 @@ pub fn node_id_substs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, format!("type parameters for node {} include inference types: \ {}", node, - substs.repr(bcx.tcx())).as_slice()); + substs.repr(bcx.tcx()))[]); } let substs = substs.erase_regions(); @@ -906,8 +909,8 @@ pub fn langcall(bcx: Block, Err(s) => { let msg = format!("{} {}", msg, s); match span { - Some(span) => bcx.tcx().sess.span_fatal(span, msg.as_slice()), - None => bcx.tcx().sess.fatal(msg.as_slice()), + Some(span) => bcx.tcx().sess.span_fatal(span, msg[]), + None => bcx.tcx().sess.fatal(msg[]), } } } diff --git a/src/librustc_trans/trans/consts.rs b/src/librustc_trans/trans/consts.rs index e4f0543b5e70f..bc386dc96a469 100644 --- a/src/librustc_trans/trans/consts.rs +++ b/src/librustc_trans/trans/consts.rs @@ -54,7 +54,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) _ => cx.sess().span_bug(lit.span, format!("integer literal has type {} (expected int \ or uint)", - ty_to_string(cx.tcx(), lit_int_ty)).as_slice()) + 
ty_to_string(cx.tcx(), lit_int_ty))[]) } } ast::LitFloat(ref fs, t) => { @@ -74,7 +74,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) } ast::LitBool(b) => C_bool(cx, b), ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), - ast::LitBinary(ref data) => C_binary_slice(cx, data.as_slice()), + ast::LitBinary(ref data) => C_binary_slice(cx, data[]), } } @@ -95,9 +95,9 @@ fn const_vec(cx: &CrateContext, e: &ast::Expr, .collect::>(); // If the vector contains enums, an LLVM array won't work. let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs.as_slice(), false) + C_struct(cx, vs[], false) } else { - C_array(llunitty, vs.as_slice()) + C_array(llunitty, vs[]) }; (v, llunitty) } @@ -152,13 +152,13 @@ fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef, } _ => { cx.sess().bug(format!("unexpected dereferenceable type {}", - ty_to_string(cx.tcx(), t)).as_slice()) + ty_to_string(cx.tcx(), t))[]) } } } None => { cx.sess().bug(format!("cannot dereference const of type {}", - ty_to_string(cx.tcx(), t)).as_slice()) + ty_to_string(cx.tcx(), t))[]) } } } @@ -190,20 +190,23 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) None => { } Some(adj) => { match adj { - ty::AdjustAddEnv(ty::RegionTraitStore(ty::ReStatic, _)) => { - let def = ty::resolve_expr(cx.tcx(), e); + ty::AdjustAddEnv(def_id, ty::RegionTraitStore(ty::ReStatic, _)) => { let wrapper = closure::get_wrapper_for_bare_fn(cx, ety_adjusted, - def, + def_id, llconst, true); llconst = C_struct(cx, &[wrapper, C_null(Type::i8p(cx))], false) } - ty::AdjustAddEnv(store) => { + ty::AdjustAddEnv(_, store) => { cx.sess() .span_bug(e.span, format!("unexpected static function: {}", - store).as_slice()) + store)[]) + } + ty::AdjustReifyFnPointer(_def_id) => { + // FIXME(#19925) once fn item types are + // zero-sized, we'll need to do something here } ty::AdjustDerefRef(ref adj) => { let mut ty = ety; @@ -264,7 +267,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) } _ => cx.sess().span_bug(e.span, format!("unimplemented type in const unsize: {}", - ty_to_string(cx.tcx(), ty)).as_slice()) + ty_to_string(cx.tcx(), ty))[]) } } _ => { @@ -272,7 +275,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) .span_bug(e.span, format!("unimplemented const \ autoref {}", - autoref).as_slice()) + autoref)[]) } } } @@ -293,7 +296,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) } cx.sess().bug(format!("const {} of type {} has size {} instead of {}", e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety), - csize, tsize).as_slice()); + csize, tsize)[]); } (llconst, ety_adjusted) } @@ -443,7 +446,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).as_slice()) + ty_to_string(cx.tcx(), bt))[]) }, ty::ty_rptr(_, mt) => match mt.ty.sty { ty::ty_vec(_, Some(u)) => { @@ -452,12 +455,12 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).as_slice()) + ty_to_string(cx.tcx(), bt))[]) }, _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).as_slice()) + ty_to_string(cx.tcx(), bt))[]) }; let len = llvm::LLVMConstIntGetZExtValue(len) as 
u64; @@ -558,8 +561,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { ast::ExprTup(ref es) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = adt::represent_type(cx, ety); - let vals = map_list(es.as_slice()); - adt::trans_const(cx, &*repr, 0, vals.as_slice()) + let vals = map_list(es[]); + adt::trans_const(cx, &*repr, 0, vals[]) } ast::ExprStruct(_, ref fs, ref base_opt) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -590,7 +593,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { } } }).collect::>(); - adt::trans_const(cx, &*repr, discr, cs.as_slice()) + adt::trans_const(cx, &*repr, discr, cs[]) }) } ast::ExprVec(ref es) => { @@ -607,9 +610,9 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { }; let vs = Vec::from_elem(n, const_expr(cx, &**elem).0); if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs.as_slice(), false) + C_struct(cx, vs[], false) } else { - C_array(llunitty, vs.as_slice()) + C_array(llunitty, vs[]) } } ast::ExprPath(ref pth) => { @@ -655,8 +658,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { Some(def::DefStruct(_)) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = adt::represent_type(cx, ety); - let arg_vals = map_list(args.as_slice()); - adt::trans_const(cx, &*repr, 0, arg_vals.as_slice()) + let arg_vals = map_list(args[]); + adt::trans_const(cx, &*repr, 0, arg_vals[]) } Some(def::DefVariant(enum_did, variant_did, _)) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -664,11 +667,11 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { let vinfo = ty::enum_variant_with_id(cx.tcx(), enum_did, variant_did); - let arg_vals = map_list(args.as_slice()); + let arg_vals = map_list(args[]); adt::trans_const(cx, &*repr, vinfo.disr_val, - arg_vals.as_slice()) + arg_vals[]) } _ => cx.sess().span_bug(e.span, "expected a struct or variant def") } diff --git a/src/librustc_trans/trans/context.rs b/src/librustc_trans/trans/context.rs index af003b011579f..2c71dd831fbcb 100644 --- a/src/librustc_trans/trans/context.rs +++ b/src/librustc_trans/trans/context.rs @@ -13,7 +13,7 @@ use llvm::{ContextRef, ModuleRef, ValueRef, BuilderRef}; use llvm::{TargetData}; use llvm::mk_target_data; use metadata::common::LinkMeta; -use middle::resolve; +use middle::def::ExportMap; use middle::traits; use trans::adt; use trans::base; @@ -61,7 +61,7 @@ pub struct SharedCrateContext<'tcx> { metadata_llmod: ModuleRef, metadata_llcx: ContextRef, - exp_map2: resolve::ExportMap2, + export_map: ExportMap, reachable: NodeSet, item_symbols: RefCell>, link_meta: LinkMeta, @@ -238,7 +238,7 @@ impl<'tcx> SharedCrateContext<'tcx> { pub fn new(crate_name: &str, local_count: uint, tcx: ty::ctxt<'tcx>, - emap2: resolve::ExportMap2, + export_map: ExportMap, symbol_hasher: Sha256, link_meta: LinkMeta, reachable: NodeSet) @@ -251,7 +251,7 @@ impl<'tcx> SharedCrateContext<'tcx> { local_ccxs: Vec::with_capacity(local_count), metadata_llmod: metadata_llmod, metadata_llcx: metadata_llcx, - exp_map2: emap2, + export_map: export_map, reachable: reachable, item_symbols: RefCell::new(NodeMap::new()), link_meta: link_meta, @@ -284,7 +284,7 @@ impl<'tcx> SharedCrateContext<'tcx> { // such as a function name in the module. // 1. 
http://llvm.org/bugs/show_bug.cgi?id=11479 let llmod_id = format!("{}.{}.rs", crate_name, i); - let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.as_slice()); + let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id[]); shared_ccx.local_ccxs.push(local_ccx); } @@ -329,8 +329,8 @@ impl<'tcx> SharedCrateContext<'tcx> { self.metadata_llcx } - pub fn exp_map2<'a>(&'a self) -> &'a resolve::ExportMap2 { - &self.exp_map2 + pub fn export_map<'a>(&'a self) -> &'a ExportMap { + &self.export_map } pub fn reachable<'a>(&'a self) -> &'a NodeSet { @@ -374,7 +374,7 @@ impl<'tcx> LocalCrateContext<'tcx> { .target .target .data_layout - .as_slice()); + []); let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo { Some(debuginfo::CrateDebugContext::new(llmod)) @@ -553,8 +553,8 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { &self.local.item_vals } - pub fn exp_map2<'a>(&'a self) -> &'a resolve::ExportMap2 { - &self.shared.exp_map2 + pub fn export_map<'a>(&'a self) -> &'a ExportMap { + &self.shared.export_map } pub fn reachable<'a>(&'a self) -> &'a NodeSet { @@ -726,7 +726,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! { self.sess().fatal( format!("the type `{}` is too big for the current architecture", - obj.repr(self.tcx())).as_slice()) + obj.repr(self.tcx()))[]) } } diff --git a/src/librustc_trans/trans/controlflow.rs b/src/librustc_trans/trans/controlflow.rs index a1574aa2f0e43..3b24ded6717cc 100644 --- a/src/librustc_trans/trans/controlflow.rs +++ b/src/librustc_trans/trans/controlflow.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use llvm::*; +use llvm::ValueRef; use middle::def; use middle::lang_items::{PanicFnLangItem, PanicBoundsCheckFnLangItem}; use trans::_match; @@ -48,7 +48,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>, debug!("trans_stmt({})", s.repr(cx.tcx())); if cx.sess().asm_comments() { - add_span_comment(cx, s.span, s.repr(cx.tcx()).as_slice()); + add_span_comment(cx, s.span, s.repr(cx.tcx())[]); } let mut bcx = cx; @@ -112,8 +112,17 @@ pub fn trans_block<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if dest != expr::Ignore { let block_ty = node_id_type(bcx, b.id); + if b.expr.is_none() || type_is_zero_size(bcx.ccx(), block_ty) { dest = expr::Ignore; + } else if b.expr.is_some() { + // If the block has an expression, but that expression isn't reachable, + // don't save into the destination given, ignore it. + if let Some(ref cfg) = bcx.fcx.cfg { + if !cfg.node_is_reachable(b.expr.as_ref().unwrap().id) { + dest = expr::Ignore; + } + } } } @@ -179,7 +188,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } let name = format!("then-block-{}-", thn.id); - let then_bcx_in = bcx.fcx.new_id_block(name.as_slice(), thn.id); + let then_bcx_in = bcx.fcx.new_id_block(name[], thn.id); let then_bcx_out = trans_block(then_bcx_in, &*thn, dest); trans::debuginfo::clear_source_location(bcx.fcx); @@ -277,6 +286,7 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, debug!("iterator type is {}, datum type is {}", ppaux::ty_to_string(bcx.tcx(), iterator_type), ppaux::ty_to_string(bcx.tcx(), iterator_datum.ty)); + let lliterator = load_ty(bcx, iterator_datum.val, iterator_datum.ty); // Create our basic blocks and set up our loop cleanups. @@ -356,6 +366,8 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, llpayload, binding_cleanup_scope_id); + debuginfo::create_for_loop_var_metadata(body_bcx_in, pat); + // Codegen the body. 
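The `create_for_loop_var_metadata` call added just above attaches debuginfo to the bindings introduced by a `for` loop's pattern. In terms of user code, these are the kinds of bindings that should now be visible in a debugger inside the loop body (a trivial, present-day example):

```rust
fn main() {
    let data = [10u8, 20, 30];
    let mut sum = 0u32;
    // `idx` and `byte` are pattern bindings of the `for` loop; the new call
    // is what attaches variable metadata to bindings like these.
    for (idx, byte) in data.iter().enumerate() {
        sum += (idx as u32) * (*byte as u32);
    }
    assert_eq!(sum, 80);
}
```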
body_bcx_out = trans_block(body_bcx_out, body, expr::Ignore); body_bcx_out = @@ -425,7 +437,7 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Some(&def::DefLabel(loop_id)) => loop_id, ref r => { bcx.tcx().sess.bug(format!("{} in def-map for label", - r).as_slice()) + r)[]) } } } @@ -489,7 +501,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let v_str = C_str_slice(ccx, fail_str); let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name.as_slice()); + let filename = token::intern_and_get_ident(loc.file.name[]); let filename = C_str_slice(ccx, filename); let line = C_uint(ccx, loc.line); let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false); @@ -498,7 +510,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args.as_slice(), + args[], Some(expr::Ignore)).bcx; Unreachable(bcx); return bcx; @@ -514,7 +526,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Extract the file/line from the span let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name.as_slice()); + let filename = token::intern_and_get_ident(loc.file.name[]); // Invoke the lang item let filename = C_str_slice(ccx, filename); @@ -525,7 +537,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicBoundsCheckFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args.as_slice(), + args[], Some(expr::Ignore)).bcx; Unreachable(bcx); return bcx; diff --git a/src/librustc_trans/trans/datum.rs b/src/librustc_trans/trans/datum.rs index 75473dc58bf47..9ab4e92b51131 100644 --- a/src/librustc_trans/trans/datum.rs +++ b/src/librustc_trans/trans/datum.rs @@ -463,7 +463,7 @@ impl<'tcx> Datum<'tcx, Lvalue> { } _ => bcx.tcx().sess.bug( format!("Unexpected unsized type in get_element: {}", - bcx.ty_to_string(self.ty)).as_slice()) + bcx.ty_to_string(self.ty))[]) }; Datum { val: val, diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs index 9a5e6830da194..adad2d5013212 100644 --- a/src/librustc_trans/trans/debuginfo.rs +++ b/src/librustc_trans/trans/debuginfo.rs @@ -182,7 +182,6 @@ //! comparatively expensive to construct, though, `ty::type_id()` is still used //! additionally as an optimization for cases where the exact same type has been //! seen before (which is most of the time). 
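The `trans_block` change in the controlflow.rs hunk above consults the function's CFG and ignores the destination when the block's tail expression is unreachable, rather than trying to store a value that can never be produced. A sketch (present-day Rust, names invented) of the situation it guards against:

```rust
#[allow(unreachable_code, unused_variables)]
fn diverges_first() -> i32 {
    // The block has a tail expression (`5`), but the `return` means control
    // never reaches it, so nothing should be stored into the block's
    // destination.
    let x = { return 7; 5 };
    x
}

fn main() {
    assert_eq!(diverges_first(), 7);
}
```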
-use self::FunctionDebugContextRepr::*; use self::VariableAccess::*; use self::VariableKind::*; use self::MemberOffset::*; @@ -285,7 +284,7 @@ impl<'tcx> TypeMap<'tcx> { metadata: DIType) { if self.type_to_metadata.insert(type_, metadata).is_some() { cx.sess().bug(format!("Type metadata for Ty '{}' is already in the TypeMap!", - ppaux::ty_to_string(cx.tcx(), type_)).as_slice()); + ppaux::ty_to_string(cx.tcx(), type_))[]); } } @@ -298,7 +297,7 @@ impl<'tcx> TypeMap<'tcx> { if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() { let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id); cx.sess().bug(format!("Type metadata for unique id '{}' is already in the TypeMap!", - unique_type_id_str.as_slice()).as_slice()); + unique_type_id_str[])[]); } } @@ -334,7 +333,7 @@ impl<'tcx> TypeMap<'tcx> { // mut ptr (*mut) -> {*mut :pointee-uid:} // unique ptr (~) -> {~ :pointee-uid:} // @-ptr (@) -> {@ :pointee-uid:} - // sized vec ([T, ..x]) -> {[:size:] :element-uid:} + // sized vec ([T; x]) -> {[:size:] :element-uid:} // unsized vec ([T]) -> {[] :element-uid:} // trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> } // closure -> { :store-sigil: |(:param-uid:),* <,_...>| -> \ @@ -379,14 +378,14 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, component_type); let component_type_id = self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(component_type_id.as_slice()); + unique_type_id.push_str(component_type_id[]); } }, ty::ty_uniq(inner_type) => { unique_type_id.push('~'); let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id[]); }, ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => { unique_type_id.push('*'); @@ -396,7 +395,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id[]); }, ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => { unique_type_id.push('&'); @@ -406,12 +405,12 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id[]); }, ty::ty_vec(inner_type, optional_length) => { match optional_length { Some(len) => { - unique_type_id.push_str(format!("[{}]", len).as_slice()); + unique_type_id.push_str(format!("[{}]", len)[]); } None => { unique_type_id.push_str("[]"); @@ -420,7 +419,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id[]); }, ty::ty_trait(ref trait_data) => { unique_type_id.push_str("trait "); @@ -431,7 +430,7 @@ impl<'tcx> TypeMap<'tcx> { trait_data.principal.substs(), &mut unique_type_id); }, - ty::ty_bare_fn(ty::BareFnTy{ unsafety, abi, ref sig } ) => { + ty::ty_bare_fn(_, ty::BareFnTy{ unsafety, abi, ref sig } ) => { if unsafety == ast::Unsafety::Unsafe { unique_type_id.push_str("unsafe "); } @@ -445,7 +444,7 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, parameter_type); let 
parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id.as_slice()); + unique_type_id.push_str(parameter_type_id[]); unique_type_id.push(','); } @@ -458,7 +457,7 @@ impl<'tcx> TypeMap<'tcx> { ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id.as_slice()); + unique_type_id.push_str(return_type_id[]); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -479,8 +478,8 @@ impl<'tcx> TypeMap<'tcx> { }, _ => { cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {}", - ppaux::ty_to_string(cx.tcx(), type_).as_slice(), - type_.sty).as_slice()) + ppaux::ty_to_string(cx.tcx(), type_)[], + type_.sty)[]) } }; @@ -523,7 +522,7 @@ impl<'tcx> TypeMap<'tcx> { output.push_str(crate_hash.as_str()); output.push_str("/"); - output.push_str(format!("{:x}", def_id.node).as_slice()); + output.push_str(format!("{:x}", def_id.node)[]); // Maybe check that there is no self type here. @@ -536,7 +535,7 @@ impl<'tcx> TypeMap<'tcx> { type_map.get_unique_type_id_of_type(cx, type_parameter); let param_type_id = type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(param_type_id.as_slice()); + output.push_str(param_type_id[]); output.push(','); } @@ -578,7 +577,7 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id.as_slice()); + unique_type_id.push_str(parameter_type_id[]); unique_type_id.push(','); } @@ -592,7 +591,7 @@ impl<'tcx> TypeMap<'tcx> { ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id.as_slice()); + unique_type_id.push_str(return_type_id[]); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -623,7 +622,7 @@ impl<'tcx> TypeMap<'tcx> { let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type); let enum_variant_type_id = format!("{}::{}", self.get_unique_type_id_as_string(enum_type_id) - .as_slice(), + [], variant_name); let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id)); UniqueTypeId(interner_key) @@ -679,12 +678,8 @@ impl<'tcx> CrateDebugContext<'tcx> { } } -pub struct FunctionDebugContext { - repr: FunctionDebugContextRepr, -} - -enum FunctionDebugContextRepr { - DebugInfo(Box), +pub enum FunctionDebugContext { + RegularContext(Box), DebugInfoDisabled, FunctionWithoutDebugInfo, } @@ -694,13 +689,13 @@ impl FunctionDebugContext { cx: &CrateContext, span: Span) -> &'a FunctionDebugContextData { - match self.repr { - DebugInfo(box ref data) => data, - DebugInfoDisabled => { + match *self { + FunctionDebugContext::RegularContext(box ref data) => data, + FunctionDebugContext::DebugInfoDisabled => { cx.sess().span_bug(span, FunctionDebugContext::debuginfo_disabled_message()); } - FunctionWithoutDebugInfo => { + FunctionDebugContext::FunctionWithoutDebugInfo => { cx.sess().span_bug(span, FunctionDebugContext::should_be_ignored_message()); } @@ -798,19 +793,19 @@ pub fn create_global_var_metadata(cx: &CrateContext, create_global_var_metadata() - Captured var-id refers to \ unexpected ast_item variant: {}", - var_item).as_slice()) + var_item)[]) } } }, _ => 
cx.sess().bug(format!("debuginfo::create_global_var_metadata() \ - Captured var-id refers to unexpected \ ast_map variant: {}", - var_item).as_slice()) + var_item)[]) }; let (file_metadata, line_number) = if span != codemap::DUMMY_SP { let loc = span_start(cx, span); - (file_metadata(cx, loc.file.name.as_slice()), loc.line as c_uint) + (file_metadata(cx, loc.file.name[]), loc.line as c_uint) } else { (UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER) }; @@ -821,7 +816,7 @@ pub fn create_global_var_metadata(cx: &CrateContext, let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id)); let var_name = token::get_ident(ident).get().to_string(); let linkage_name = - namespace_node.mangled_name_of_contained_item(var_name.as_slice()); + namespace_node.mangled_name_of_contained_item(var_name[]); let var_scope = namespace_node.scope; var_name.with_c_str(|var_name| { @@ -844,6 +839,8 @@ pub fn create_global_var_metadata(cx: &CrateContext, /// Creates debug information for the given local variable. /// +/// This function assumes that there's a datum for each pattern component of the +/// local in `bcx.fcx.lllocals`. /// Adds the created metadata nodes directly to the crate's IR. pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) { if fn_should_be_ignored(bcx.fcx) { @@ -852,23 +849,27 @@ pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) { let cx = bcx.ccx(); let def_map = &cx.tcx().def_map; + let locals = bcx.fcx.lllocals.borrow(); - pat_util::pat_bindings(def_map, &*local.pat, |_, node_id, span, path1| { - let var_ident = path1.node; - - let datum = match bcx.fcx.lllocals.borrow().get(&node_id).cloned() { + pat_util::pat_bindings(def_map, &*local.pat, |_, node_id, span, var_ident| { + let datum = match locals.get(&node_id) { Some(datum) => datum, None => { bcx.sess().span_bug(span, format!("no entry in lllocals table for {}", - node_id).as_slice()); + node_id)[]); } }; + if unsafe { llvm::LLVMIsAAllocaInst(datum.val) } == ptr::null_mut() { + cx.sess().span_bug(span, "debuginfo::create_local_var_metadata() - \ + Referenced variable location is not an alloca!"); + } + let scope_metadata = scope_metadata(bcx.fcx, node_id, span); declare_local(bcx, - var_ident, + var_ident.node, datum.ty, scope_metadata, DirectVariable { alloca: datum.val }, @@ -910,7 +911,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, "debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {}", - ast_item).as_slice()); + ast_item)[]); } } } @@ -920,7 +921,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, format!("debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {}", - ast_item).as_slice()); + ast_item)[]); } }; @@ -981,7 +982,7 @@ pub fn create_match_binding_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // for the binding. For ByRef bindings that's a `T*` but for ByMove bindings we // actually have `T**`. So to get the actual variable we need to dereference once // more. For ByCopy we just use the stack slot we created for the binding. - let var_type = match binding.trmode { + let var_access = match binding.trmode { TrByCopy(llbinding) => DirectVariable { alloca: llbinding }, @@ -998,57 +999,105 @@ pub fn create_match_binding_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, variable_ident, binding.ty, scope_metadata, - var_type, + var_access, LocalVariable, binding.span); } /// Creates debug information for the given function argument. 
/// +/// This function assumes that there's a datum for each pattern component of the +/// argument in `bcx.fcx.lllocals`. /// Adds the created metadata nodes directly to the crate's IR. pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) { if fn_should_be_ignored(bcx.fcx) { return; } - let fcx = bcx.fcx; - let cx = fcx.ccx; - - let def_map = &cx.tcx().def_map; - let scope_metadata = bcx.fcx.debug_context.get_ref(cx, arg.pat.span).fn_metadata; + let def_map = &bcx.tcx().def_map; + let scope_metadata = bcx + .fcx + .debug_context + .get_ref(bcx.ccx(), arg.pat.span) + .fn_metadata; + let locals = bcx.fcx.lllocals.borrow(); - pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, span, path1| { - let llarg = match bcx.fcx.lllocals.borrow().get(&node_id).cloned() { + pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, span, var_ident| { + let datum = match locals.get(&node_id) { Some(v) => v, None => { bcx.sess().span_bug(span, format!("no entry in lllocals table for {}", - node_id).as_slice()); + node_id)[]); } }; - if unsafe { llvm::LLVMIsAAllocaInst(llarg.val) } == ptr::null_mut() { - cx.sess().span_bug(span, "debuginfo::create_argument_metadata() - \ - Referenced variable location is not an alloca!"); + if unsafe { llvm::LLVMIsAAllocaInst(datum.val) } == ptr::null_mut() { + bcx.sess().span_bug(span, "debuginfo::create_argument_metadata() - \ + Referenced variable location is not an alloca!"); } let argument_index = { - let counter = &fcx.debug_context.get_ref(cx, span).argument_counter; + let counter = &bcx + .fcx + .debug_context + .get_ref(bcx.ccx(), span) + .argument_counter; let argument_index = counter.get(); counter.set(argument_index + 1); argument_index }; declare_local(bcx, - path1.node, - llarg.ty, + var_ident.node, + datum.ty, scope_metadata, - DirectVariable { alloca: llarg.val }, + DirectVariable { alloca: datum.val }, ArgumentVariable(argument_index), span); }) } +/// Creates debug information for the given for-loop variable. +/// +/// This function assumes that there's a datum for each pattern component of the +/// loop variable in `bcx.fcx.lllocals`. +/// Adds the created metadata nodes directly to the crate's IR. 
+pub fn create_for_loop_var_metadata(bcx: Block, pat: &ast::Pat) { + if fn_should_be_ignored(bcx.fcx) { + return; + } + + let def_map = &bcx.tcx().def_map; + let locals = bcx.fcx.lllocals.borrow(); + + pat_util::pat_bindings(def_map, pat, |_, node_id, span, var_ident| { + let datum = match locals.get(&node_id) { + Some(datum) => datum, + None => { + bcx.sess().span_bug(span, + format!("no entry in lllocals table for {}", + node_id).as_slice()); + } + }; + + if unsafe { llvm::LLVMIsAAllocaInst(datum.val) } == ptr::null_mut() { + bcx.sess().span_bug(span, "debuginfo::create_for_loop_var_metadata() - \ + Referenced variable location is not an alloca!"); + } + + let scope_metadata = scope_metadata(bcx.fcx, node_id, span); + + declare_local(bcx, + var_ident.node, + datum.ty, + scope_metadata, + DirectVariable { alloca: datum.val }, + LocalVariable, + span); + }) +} + pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, node_id: ast::NodeId, node_span: Span, @@ -1117,13 +1166,13 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, pub fn set_source_location(fcx: &FunctionContext, node_id: ast::NodeId, span: Span) { - match fcx.debug_context.repr { - DebugInfoDisabled => return, - FunctionWithoutDebugInfo => { + match fcx.debug_context { + FunctionDebugContext::DebugInfoDisabled => return, + FunctionDebugContext::FunctionWithoutDebugInfo => { set_debug_location(fcx.ccx, UnknownLocation); return; } - DebugInfo(box ref function_debug_context) => { + FunctionDebugContext::RegularContext(box ref function_debug_context) => { let cx = fcx.ccx; debug!("set_source_location: {}", cx.sess().codemap().span_to_string(span)); @@ -1160,8 +1209,8 @@ pub fn clear_source_location(fcx: &FunctionContext) { /// switches source location emitting on and must therefore be called before the /// first real statement/expression of the function is translated. pub fn start_emitting_source_locations(fcx: &FunctionContext) { - match fcx.debug_context.repr { - DebugInfo(box ref data) => { + match fcx.debug_context { + FunctionDebugContext::RegularContext(box ref data) => { data.source_locations_enabled.set(true) }, _ => { /* safe to ignore */ } @@ -1179,7 +1228,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, param_substs: &Substs<'tcx>, llfn: ValueRef) -> FunctionDebugContext { if cx.sess().opts.debuginfo == NoDebugInfo { - return FunctionDebugContext { repr: DebugInfoDisabled }; + return FunctionDebugContext::DebugInfoDisabled; } // Clear the debug location so we don't assign them in the function prelude. @@ -1189,7 +1238,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, if fn_ast_id == ast::DUMMY_NODE_ID { // This is a function not linked to any source location, so don't // generate debuginfo for it. 
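To illustrate the assumption stated in the doc comments above (one datum per pattern component in bcx.fcx.lllocals), a loop pattern with two bindings is the simplest case; this sketch uses present-day syntax and is not part of the patch:

fn sum_pairs(pairs: &[(u32, u32)]) -> u32 {
    let mut total = 0;
    // `k` and `v` are separate pattern components: pat_bindings visits two
    // node ids, and each needs its own lllocals entry and declare_local call.
    for &(k, v) in pairs {
        total += k + v;
    }
    total
}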
- return FunctionDebugContext { repr: FunctionWithoutDebugInfo }; + return FunctionDebugContext::FunctionWithoutDebugInfo; } let empty_generics = ast_util::empty_generics(); @@ -1199,7 +1248,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (ident, fn_decl, generics, top_level_block, span, has_path) = match fnitem { ast_map::NodeItem(ref item) => { if contains_nodebug_attribute(item.attrs.as_slice()) { - return FunctionDebugContext { repr: FunctionWithoutDebugInfo }; + return FunctionDebugContext::FunctionWithoutDebugInfo; } match item.node { @@ -1216,9 +1265,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match **item { ast::MethodImplItem(ref method) => { if contains_nodebug_attribute(method.attrs.as_slice()) { - return FunctionDebugContext { - repr: FunctionWithoutDebugInfo - }; + return FunctionDebugContext::FunctionWithoutDebugInfo; } (method.pe_ident(), @@ -1239,7 +1286,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match expr.node { ast::ExprClosure(_, _, ref fn_decl, ref top_level_block) => { let name = format!("fn{}", token::gensym("fn")); - let name = token::str_to_ident(name.as_slice()); + let name = token::str_to_ident(name[]); (name, &**fn_decl, // This is not quite right. It should actually inherit // the generics of the enclosing function. @@ -1257,9 +1304,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match **trait_method { ast::ProvidedMethod(ref method) => { if contains_nodebug_attribute(method.attrs.as_slice()) { - return FunctionDebugContext { - repr: FunctionWithoutDebugInfo - }; + return FunctionDebugContext::FunctionWithoutDebugInfo; } (method.pe_ident(), @@ -1273,27 +1318,27 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, cx.sess() .bug(format!("create_function_debug_context: \ unexpected sort of node: {}", - fnitem).as_slice()) + fnitem)[]) } } } ast_map::NodeForeignItem(..) | ast_map::NodeVariant(..) | ast_map::NodeStructCtor(..) 
=> { - return FunctionDebugContext { repr: FunctionWithoutDebugInfo }; + return FunctionDebugContext::FunctionWithoutDebugInfo; } _ => cx.sess().bug(format!("create_function_debug_context: \ unexpected sort of node: {}", - fnitem).as_slice()) + fnitem)[]) }; // This can be the case for functions inlined from another crate if span == codemap::DUMMY_SP { - return FunctionDebugContext { repr: FunctionWithoutDebugInfo }; + return FunctionDebugContext::FunctionWithoutDebugInfo; } let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name[]); let function_type_metadata = unsafe { let fn_signature = get_function_signature(cx, @@ -1320,7 +1365,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (linkage_name, containing_scope) = if has_path { let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id)); let linkage_name = namespace_node.mangled_name_of_contained_item( - function_name.as_slice()); + function_name[]); let containing_scope = namespace_node.scope; (linkage_name, containing_scope) } else { @@ -1356,22 +1401,23 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, }) }); + let scope_map = create_scope_map(cx, + fn_decl.inputs.as_slice(), + &*top_level_block, + fn_metadata, + fn_ast_id); + // Initialize fn debug context (including scope map and namespace map) let fn_debug_context = box FunctionDebugContextData { - scope_map: RefCell::new(NodeMap::new()), + scope_map: RefCell::new(scope_map), fn_metadata: fn_metadata, argument_counter: Cell::new(1), source_locations_enabled: Cell::new(false), }; - populate_scope_map(cx, - fn_decl.inputs.as_slice(), - &*top_level_block, - fn_metadata, - fn_ast_id, - &mut *fn_debug_context.scope_map.borrow_mut()); - return FunctionDebugContext { repr: DebugInfo(fn_debug_context) }; + + return FunctionDebugContext::RegularContext(fn_debug_context); fn get_function_signature<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fn_ast_id: ast::NodeId, @@ -1405,7 +1451,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP)); } - return create_DIArray(DIB(cx), signature.as_slice()); + return create_DIArray(DIB(cx), signature[]); } fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -1438,7 +1484,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, actual_self_type, true); - name_to_append_suffix_to.push_str(actual_self_type_name.as_slice()); + name_to_append_suffix_to.push_str(actual_self_type_name[]); if generics.is_type_parameterized() { name_to_append_suffix_to.push_str(","); @@ -1478,7 +1524,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let actual_type_name = compute_debuginfo_type_name(cx, actual_type, true); - name_to_append_suffix_to.push_str(actual_type_name.as_slice()); + name_to_append_suffix_to.push_str(actual_type_name[]); if index != generics.ty_params.len() - 1 { name_to_append_suffix_to.push_str(","); @@ -1506,7 +1552,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, name_to_append_suffix_to.push('>'); - return create_DIArray(DIB(cx), template_params.as_slice()); + return create_DIArray(DIB(cx), template_params[]); } } @@ -1604,7 +1650,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let cx: &CrateContext = bcx.ccx(); let filename = span_start(cx, span).file.name.clone(); - let file_metadata = 
file_metadata(cx, filename.as_slice()); + let file_metadata = file_metadata(cx, filename[]); let name = token::get_ident(variable_ident); let loc = span_start(cx, span); @@ -1691,7 +1737,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile { let work_dir = cx.sess().working_dir.as_str().unwrap(); let file_name = if full_path.starts_with(work_dir) { - full_path.slice(work_dir.len() + 1u, full_path.len()) + full_path[work_dir.len() + 1u..full_path.len()] } else { full_path }; @@ -1725,7 +1771,7 @@ fn scope_metadata(fcx: &FunctionContext, fcx.ccx.sess().span_bug(error_reporting_span, format!("debuginfo: Could not find scope info for node {}", - node).as_slice()); + node)[]); } } } @@ -1925,7 +1971,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> { cx.sess().bug(format!("Forward declaration of potentially recursive type \ '{}' was not found in TypeMap!", ppaux::ty_to_string(cx.tcx(), unfinished_type)) - .as_slice()); + []); } } @@ -1937,7 +1983,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> { set_members_of_composite_type(cx, metadata_stub, llvm_type, - member_descriptions.as_slice()); + member_descriptions[]); return MetadataCreationResult::new(metadata_stub, true); } } @@ -2009,7 +2055,7 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let struct_metadata_stub = create_struct_stub(cx, struct_llvm_type, - struct_name.as_slice(), + struct_name[], unique_type_id, containing_scope); @@ -2070,7 +2116,7 @@ fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, unique_type_id, create_struct_stub(cx, tuple_llvm_type, - tuple_name.as_slice(), + tuple_name[], unique_type_id, UNKNOWN_SCOPE_METADATA), tuple_llvm_type, @@ -2130,7 +2176,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions.as_slice()); + member_descriptions[]); MemberDescription { name: "".to_string(), llvm_type: variant_llvm_type, @@ -2163,7 +2209,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions.as_slice()); + member_descriptions[]); vec![ MemberDescription { name: "".to_string(), @@ -2263,7 +2309,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - variant_member_descriptions.as_slice()); + variant_member_descriptions[]); // Encode the information about the null variant in the union // member's name. 
@@ -2342,7 +2388,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .iter() .map(|&t| type_of::type_of(cx, t)) .collect::>() - .as_slice(), + [], struct_def.packed); // Could do some consistency checks here: size, align, field count, discr type @@ -2366,7 +2412,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Some(ref names) => { names.iter() .map(|ident| { - token::get_ident(*ident).get().to_string().into_string() + token::get_ident(*ident).get().to_string() }).collect() } None => variant_info.args.iter().map(|_| "".to_string()).collect() @@ -2409,7 +2455,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id); let loc = span_start(cx, definition_span); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name[]); let variants = ty::enum_variants(cx.tcx(), enum_def_id); @@ -2456,7 +2502,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, UNKNOWN_LINE_NUMBER, bytes_to_bits(discriminant_size), bytes_to_bits(discriminant_align), - create_DIArray(DIB(cx), enumerators_metadata.as_slice()), + create_DIArray(DIB(cx), enumerators_metadata[]), discriminant_base_type_metadata) } }); @@ -2598,7 +2644,7 @@ fn set_members_of_composite_type(cx: &CrateContext, Please use a rustc built with anewer \ version of LLVM.", llvm_version_major, - llvm_version_minor).as_slice()); + llvm_version_minor)[]); } else { cx.sess().bug("debuginfo::set_members_of_composite_type() - \ Already completed forward declaration re-encountered."); @@ -2637,7 +2683,7 @@ fn set_members_of_composite_type(cx: &CrateContext, .collect(); unsafe { - let type_array = create_DIArray(DIB(cx), member_metadata.as_slice()); + let type_array = create_DIArray(DIB(cx), member_metadata[]); llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array); } } @@ -2738,7 +2784,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let member_llvm_types = slice_llvm_type.field_types(); assert!(slice_layout_is_correct(cx, - member_llvm_types.as_slice(), + member_llvm_types[], element_type)); let member_descriptions = [ MemberDescription { @@ -2760,11 +2806,11 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert!(member_descriptions.len() == member_llvm_types.len()); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name[]); let metadata = composite_type_metadata(cx, slice_llvm_type, - slice_type_name.as_slice(), + slice_type_name[], unique_type_id, &member_descriptions, UNKNOWN_SCOPE_METADATA, @@ -2810,7 +2856,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, llvm::LLVMDIBuilderCreateSubroutineType( DIB(cx), UNKNOWN_FILE_METADATA, - create_DIArray(DIB(cx), signature_metadata.as_slice())) + create_DIArray(DIB(cx), signature_metadata[])) }, false); } @@ -2836,7 +2882,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type); cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \ trait_pointer_metadata(): {}", - pp_type_name.as_slice()).as_slice()); + pp_type_name[])[]); } }; @@ -2850,7 +2896,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, composite_type_metadata(cx, trait_llvm_type, - trait_type_name.as_slice(), + trait_type_name[], unique_type_id, &[], containing_scope, 
@@ -2951,7 +2997,7 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } } } - ty::ty_bare_fn(ref barefnty) => { + ty::ty_bare_fn(_, ref barefnty) => { subroutine_type_metadata(cx, unique_type_id, &barefnty.sig, usage_site_span) } ty::ty_closure(ref closurety) => { @@ -2973,13 +3019,13 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_tup(ref elements) => { prepare_tuple_metadata(cx, t, - elements.as_slice(), + elements[], unique_type_id, usage_site_span).finalize(cx) } _ => { cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {}", - sty).as_slice()) + sty)[]) } }; @@ -2997,9 +3043,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, type id '{}' to already be in \ the debuginfo::TypeMap but it \ was not. (Ty = {})", - unique_type_id_str.as_slice(), + unique_type_id_str[], ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message.as_slice()); + cx.sess().span_bug(usage_site_span, error_message[]); } }; @@ -3012,9 +3058,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, UniqueTypeId maps in \ debuginfo::TypeMap. \ UniqueTypeId={}, Ty={}", - unique_type_id_str.as_slice(), + unique_type_id_str[], ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message.as_slice()); + cx.sess().span_bug(usage_site_span, error_message[]); } } None => { @@ -3134,8 +3180,8 @@ fn DIB(cx: &CrateContext) -> DIBuilderRef { } fn fn_should_be_ignored(fcx: &FunctionContext) -> bool { - match fcx.debug_context.repr { - DebugInfo(_) => false, + match fcx.debug_context { + FunctionDebugContext::RegularContext(_) => false, _ => true } } @@ -3169,12 +3215,14 @@ fn get_namespace_and_span_for_item(cx: &CrateContext, def_id: ast::DefId) // what belongs to which scope, creating DIScope DIEs along the way, and // introducing *artificial* lexical scope descriptors where necessary. These // artificial scopes allow GDB to correctly handle name shadowing. -fn populate_scope_map(cx: &CrateContext, - args: &[ast::Arg], - fn_entry_block: &ast::Block, - fn_metadata: DISubprogram, - fn_ast_id: ast::NodeId, - scope_map: &mut NodeMap) { +fn create_scope_map(cx: &CrateContext, + args: &[ast::Arg], + fn_entry_block: &ast::Block, + fn_metadata: DISubprogram, + fn_ast_id: ast::NodeId) + -> NodeMap { + let mut scope_map = NodeMap::new(); + let def_map = &cx.tcx().def_map; struct ScopeStackEntry { @@ -3200,11 +3248,14 @@ fn populate_scope_map(cx: &CrateContext, with_new_scope(cx, fn_entry_block.span, &mut scope_stack, - scope_map, + &mut scope_map, |cx, scope_stack, scope_map| { walk_block(cx, fn_entry_block, scope_stack, scope_map); }); + return scope_map; + + // local helper functions for walking the AST. 
fn with_new_scope(cx: &CrateContext, scope_span: Span, @@ -3215,7 +3266,7 @@ fn populate_scope_map(cx: &CrateContext, { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name[]); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3340,7 +3391,7 @@ fn populate_scope_map(cx: &CrateContext, let file_metadata = file_metadata(cx, loc.file .name - .as_slice()); + []); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3440,7 +3491,7 @@ fn populate_scope_map(cx: &CrateContext, } ast::PatMac(_) => { - cx.sess().span_bug(pat.span, "debuginfo::populate_scope_map() - \ + cx.sess().span_bug(pat.span, "debuginfo::create_scope_map() - \ Found unexpanded macro."); } } @@ -3526,7 +3577,7 @@ fn populate_scope_map(cx: &CrateContext, } ast::ExprIfLet(..) => { - cx.sess().span_bug(exp.span, "debuginfo::populate_scope_map() - \ + cx.sess().span_bug(exp.span, "debuginfo::create_scope_map() - \ Found unexpanded if-let."); } @@ -3543,7 +3594,7 @@ fn populate_scope_map(cx: &CrateContext, } ast::ExprWhileLet(..) => { - cx.sess().span_bug(exp.span, "debuginfo::populate_scope_map() - \ + cx.sess().span_bug(exp.span, "debuginfo::create_scope_map() - \ Found unexpanded while-let."); } @@ -3568,7 +3619,7 @@ fn populate_scope_map(cx: &CrateContext, } ast::ExprMac(_) => { - cx.sess().span_bug(exp.span, "debuginfo::populate_scope_map() - \ + cx.sess().span_bug(exp.span, "debuginfo::create_scope_map() - \ Found unexpanded macro."); } @@ -3752,7 +3803,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match optional_length { Some(len) => { - output.push_str(format!(", ..{}", len).as_slice()); + output.push_str(format!("; {}", len).as_slice()); } None => { /* nothing to do */ } }; @@ -3763,7 +3814,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, push_item_name(cx, trait_data.principal.def_id(), false, output); push_type_params(cx, trait_data.principal.substs(), output); }, - ty::ty_bare_fn(ty::BareFnTy{ unsafety, abi, ref sig } ) => { + ty::ty_bare_fn(_, ty::BareFnTy{ unsafety, abi, ref sig } ) => { if unsafety == ast::Unsafety::Unsafe { output.push_str("unsafe "); } @@ -3874,7 +3925,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_open(_) | ty::ty_param(_) => { cx.sess().bug(format!("debuginfo: Trying to create type name for \ - unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).as_slice()); + unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t))[]); } } @@ -3957,13 +4008,13 @@ impl NamespaceTreeNode { None => {} } let string = token::get_name(node.name); - output.push_str(format!("{}", string.get().len()).as_slice()); + output.push_str(format!("{}", string.get().len())[]); output.push_str(string.get()); } let mut name = String::from_str("_ZN"); fill_nested(self, &mut name); - name.push_str(format!("{}", item_name.len()).as_slice()); + name.push_str(format!("{}", item_name.len())[]); name.push_str(item_name); name.push('E'); name @@ -3971,7 +4022,7 @@ impl NamespaceTreeNode { } fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str { - cx.link_meta().crate_name.as_slice() + cx.link_meta().crate_name[] } fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc { @@ -4048,7 +4099,7 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc { 
cx.sess().bug(format!("debuginfo::namespace_for_item(): \ path too short for {}", - def_id).as_slice()); + def_id)[]); } } }) diff --git a/src/librustc_trans/trans/expr.rs b/src/librustc_trans/trans/expr.rs index dd87879b73755..3388a7623e315 100644 --- a/src/librustc_trans/trans/expr.rs +++ b/src/librustc_trans/trans/expr.rs @@ -54,7 +54,7 @@ use trans::inline; use trans::tvec; use trans::type_of; use middle::ty::{struct_fields, tup_fields}; -use middle::ty::{AdjustDerefRef, AdjustAddEnv, AutoUnsafe}; +use middle::ty::{AdjustDerefRef, AdjustReifyFnPointer, AdjustAddEnv, AutoUnsafe}; use middle::ty::{AutoPtr}; use middle::ty::{mod, Ty}; use middle::ty::MethodCall; @@ -177,8 +177,12 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, datum.to_string(bcx.ccx()), adjustment.repr(bcx.tcx())); match adjustment { - AdjustAddEnv(..) => { - datum = unpack_datum!(bcx, add_env(bcx, expr, datum)); + AdjustAddEnv(def_id, _) => { + datum = unpack_datum!(bcx, add_env(bcx, def_id, expr, datum)); + } + AdjustReifyFnPointer(_def_id) => { + // FIXME(#19925) once fn item types are + // zero-sized, we'll need to do something here } AdjustDerefRef(ref adj) => { let (autoderefs, use_autoref) = match adj.autoref { @@ -311,7 +315,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, unsized_info(bcx, k, id, ty_substs[tp_index], |t| t) } _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}", - bcx.ty_to_string(unadjusted_ty)).as_slice()) + bcx.ty_to_string(unadjusted_ty))[]) }, &ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => { let substs = principal.substs().with_self_ty(unadjusted_ty).erase_regions(); @@ -442,7 +446,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let unboxed_ty = match datum_ty.sty { ty::ty_uniq(t) => t, _ => bcx.sess().bug(format!("Expected ty_uniq, found {}", - bcx.ty_to_string(datum_ty)).as_slice()) + bcx.ty_to_string(datum_ty))[]) }; let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span)); @@ -466,6 +470,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } fn add_env<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + def_id: ast::DefId, expr: &ast::Expr, datum: Datum<'tcx, Expr>) -> DatumBlock<'blk, 'tcx, Expr> { @@ -477,8 +482,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let closure_ty = expr_ty_adjusted(bcx, expr); let fn_ptr = datum.to_llscalarish(bcx); - let def = ty::resolve_expr(bcx.tcx(), expr); - closure::make_closure_from_bare_fn(bcx, closure_ty, def, fn_ptr) + closure::make_closure_from_bare_fn(bcx, closure_ty, def_id, fn_ptr) } } @@ -660,7 +664,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_datum_unadjusted reached \ fall-through case: {}", - expr.node).as_slice()); + expr.node)[]); } } } @@ -926,7 +930,29 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, controlflow::trans_cont(bcx, expr.id, label_opt) } ast::ExprRet(ref ex) => { - controlflow::trans_ret(bcx, ex.as_ref().map(|e| &**e)) + // Check to see if the return expression itself is reachable. + // This can occur when the inner expression contains a return + let reachable = if let Some(ref cfg) = bcx.fcx.cfg { + cfg.node_is_reachable(expr.id) + } else { + true + }; + + if reachable { + controlflow::trans_ret(bcx, ex.as_ref().map(|e| &**e)) + } else { + // If it's not reachable, just translate the inner expression + // directly. This avoids having to manage a return slot when + // it won't actually be used anyway. 
+ if let &Some(ref x) = ex { + bcx = trans_into(bcx, &**x, Ignore); + } + // Mark the end of the block as unreachable. Once we get to + // a return expression, there's no more we should be doing + // after this. + Unreachable(bcx); + bcx + } } ast::ExprWhile(ref cond, ref body, _) => { controlflow::trans_while(bcx, expr.id, &**cond, &**body) @@ -985,7 +1011,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_stmt_unadjusted reached \ fall-through case: {}", - expr.node).as_slice()); + expr.node)[]); } } } @@ -1011,14 +1037,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest) } ast::ExprMatch(ref discr, ref arms, _) => { - _match::trans_match(bcx, expr, &**discr, arms.as_slice(), dest) + _match::trans_match(bcx, expr, &**discr, arms[], dest) } ast::ExprBlock(ref blk) => { controlflow::trans_block(bcx, &**blk, dest) } ast::ExprStruct(_, ref fields, ref base) => { trans_struct(bcx, - fields.as_slice(), + fields[], base.as_ref().map(|e| &**e), expr.span, expr.id, @@ -1030,7 +1056,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_adt(bcx, expr_ty(bcx, expr), 0, - numbered_fields.as_slice(), + numbered_fields[], None, dest, Some(NodeInfo { id: expr.id, span: expr.span })) @@ -1074,13 +1100,13 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_overloaded_call(bcx, expr, &**f, - args.as_slice(), + args[], Some(dest)) } else { callee::trans_call(bcx, expr, &**f, - callee::ArgExprs(args.as_slice()), + callee::ArgExprs(args[]), dest) } } @@ -1088,7 +1114,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, callee::trans_method_call(bcx, expr, &*args[0], - callee::ArgExprs(args.as_slice()), + callee::ArgExprs(args[]), dest) } ast::ExprBinary(op, ref lhs, ref rhs) => { @@ -1137,7 +1163,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_dps_unadjusted reached fall-through \ case: {}", - expr.node).as_slice()); + expr.node)[]); } } } @@ -1185,7 +1211,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.tcx().sess.span_bug(ref_expr.span, format!( "Non-DPS def {} referened by {}", - def, bcx.node_id_to_string(ref_expr.id)).as_slice()); + def, bcx.node_id_to_string(ref_expr.id))[]); } } } @@ -1212,7 +1238,7 @@ fn trans_def_fn_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug(ref_expr.span, format!( "trans_def_fn_unadjusted invoked on: {} for {}", def, - ref_expr.repr(bcx.tcx())).as_slice()); + ref_expr.repr(bcx.tcx()))[]); } }; @@ -1235,7 +1261,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no llval for upvar {} found", - nid).as_slice()); + nid)[]); } } } @@ -1245,7 +1271,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no datum for local/arg {} found", - nid).as_slice()); + nid)[]); } }; debug!("take_local(nid={}, v={}, ty={})", @@ -1255,7 +1281,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.sess().unimpl(format!( "unsupported def type in trans_local_var: {}", - def).as_slice()); + def)[]); } } } @@ -1272,11 +1298,11 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, { match ty.sty { ty::ty_struct(did, ref substs) => { - op(0, struct_fields(tcx, did, substs).as_slice()) + op(0, struct_fields(tcx, did, 
substs)[]) } ty::ty_tup(ref v) => { - op(0, tup_fields(v.as_slice()).as_slice()) + op(0, tup_fields(v[])[]) } ty::ty_enum(_, ref substs) => { @@ -1286,7 +1312,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, tcx.sess.bug(format!( "cannot get field types from the enum type {} \ without a node ID", - ty.repr(tcx)).as_slice()); + ty.repr(tcx))[]); } Some(node_id) => { let def = tcx.def_map.borrow()[node_id].clone(); @@ -1297,7 +1323,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, op(variant_info.disr_val, struct_fields(tcx, variant_id, - substs).as_slice()) + substs)[]) } _ => { tcx.sess.bug("resolve didn't map this expr to a \ @@ -1311,7 +1337,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, _ => { tcx.sess.bug(format!( "cannot get field types from the type {}", - ty.repr(tcx)).as_slice()); + ty.repr(tcx))[]); } } } @@ -1366,7 +1392,7 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_adt(bcx, ty, discr, - numbered_fields.as_slice(), + numbered_fields[], optbase, dest, Some(NodeInfo { id: expr_id, span: expr_span })) @@ -2003,7 +2029,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out).as_slice()) + k_out)[]) } } } @@ -2012,7 +2038,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out).as_slice()) + k_out)[]) }; return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock(); } @@ -2174,7 +2200,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug( expr.span, format!("deref invoked on expr of illegal type {}", - datum.ty.repr(bcx.tcx())).as_slice()); + datum.ty.repr(bcx.tcx()))[]); } }; diff --git a/src/librustc_trans/trans/foreign.rs b/src/librustc_trans/trans/foreign.rs index d072031993050..1bad476863fdb 100644 --- a/src/librustc_trans/trans/foreign.rs +++ b/src/librustc_trans/trans/foreign.rs @@ -106,7 +106,7 @@ pub fn register_static(ccx: &CrateContext, let llty = type_of::type_of(ccx, ty); let ident = link_name(foreign_item); - match attr::first_attr_value_str_by_name(foreign_item.attrs.as_slice(), + match attr::first_attr_value_str_by_name(foreign_item.attrs[], "linkage") { // If this is a static with a linkage specified, then we need to handle // it a little specially. 
The typesystem prevents things like &T and @@ -228,16 +228,16 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ccx.tn().val_to_string(llretptr)); let (fn_abi, fn_sig) = match callee_ty.sty { - ty::ty_bare_fn(ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()), + ty::ty_bare_fn(_, ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()), _ => ccx.sess().bug("trans_native_call called on non-function type") }; - let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.as_slice()); + let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys[]); let fn_type = cabi::compute_abi_info(ccx, - llsig.llarg_tys.as_slice(), + llsig.llarg_tys[], llsig.llret_ty, llsig.ret_def); - let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.as_slice(); + let arg_tys: &[cabi::ArgType] = fn_type.arg_tys[]; let mut llargs_foreign = Vec::new(); @@ -363,7 +363,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llforeign_retval = CallWithConv(bcx, llfn, - llargs_foreign.as_slice(), + llargs_foreign[], cc, Some(attrs)); @@ -433,7 +433,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) { abi => { let ty = ty::node_id_to_type(ccx.tcx(), foreign_item.id); register_foreign_item_fn(ccx, abi, ty, - lname.get().as_slice()); + lname.get()[]); // Unlike for other items, we shouldn't call // `base::update_linkage` here. Foreign items have // special linkage requirements, which are handled @@ -479,7 +479,7 @@ pub fn decl_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let tys = foreign_types_for_fn_ty(ccx, t); let llfn_ty = lltype_for_fn_from_foreign_types(ccx, &tys); let cconv = match t.sty { - ty::ty_bare_fn(ref fn_ty) => { + ty::ty_bare_fn(_, ref fn_ty) => { llvm_calling_convention(ccx, fn_ty.abi) } _ => panic!("expected bare fn in decl_rust_fn_with_foreign_abi") @@ -502,7 +502,7 @@ pub fn register_rust_fn_with_foreign_abi(ccx: &CrateContext, let llfn_ty = lltype_for_fn_from_foreign_types(ccx, &tys); let t = ty::node_id_to_type(ccx.tcx(), node_id); let cconv = match t.sty { - ty::ty_bare_fn(ref fn_ty) => { + ty::ty_bare_fn(_, ref fn_ty) => { llvm_calling_convention(ccx, fn_ty.abi) } _ => panic!("expected bare fn in register_rust_fn_with_foreign_abi") @@ -556,14 +556,14 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Compute the type that the function would have if it were just a // normal Rust function. This will be the type of the wrappee fn. 
match t.sty { - ty::ty_bare_fn(ref f) => { + ty::ty_bare_fn(_, ref f) => { assert!(f.abi != Rust && f.abi != RustIntrinsic); } _ => { ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \ expected a bare fn ty", ccx.tcx().map.path_to_string(id), - t.repr(tcx)).as_slice()); + t.repr(tcx))[]); } }; @@ -571,7 +571,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.tcx().map.path_to_string(id), id, t.repr(tcx)); - let llfn = base::decl_internal_rust_fn(ccx, t, ps.as_slice()); + let llfn = base::decl_internal_rust_fn(ccx, t, ps[]); base::set_llvm_fn_attrs(ccx, attrs, llfn); base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]); llfn @@ -744,7 +744,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, debug!("calling llrustfn = {}, t = {}", ccx.tn().val_to_string(llrustfn), t.repr(ccx.tcx())); let attributes = base::get_fn_llvm_attributes(ccx, t); - let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), Some(attributes)); + let llrust_ret_val = builder.call(llrustfn, llrust_args[], Some(attributes)); // Get the return value where the foreign fn expects it. let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast { @@ -811,9 +811,9 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // the massive simplifications that have occurred. pub fn link_name(i: &ast::ForeignItem) -> InternedString { - match attr::first_attr_value_str_by_name(i.attrs.as_slice(), "link_name") { + match attr::first_attr_value_str_by_name(i.attrs[], "link_name") { Some(ln) => ln.clone(), - None => match weak_lang_items::link_name(i.attrs.as_slice()) { + None => match weak_lang_items::link_name(i.attrs[]) { Some(name) => name, None => token::get_ident(i.ident), } @@ -849,12 +849,12 @@ fn foreign_types_for_id<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> ForeignTypes<'tcx> { let fn_sig = match ty.sty { - ty::ty_bare_fn(ref fn_ty) => fn_ty.sig.clone(), + ty::ty_bare_fn(_, ref fn_ty) => fn_ty.sig.clone(), _ => ccx.sess().bug("foreign_types_for_fn_ty called on non-function type") }; let llsig = foreign_signature(ccx, &fn_sig, fn_sig.0.inputs.as_slice()); let fn_ty = cabi::compute_abi_info(ccx, - llsig.llarg_tys.as_slice(), + llsig.llarg_tys[], llsig.llret_ty, llsig.ret_def); debug!("foreign_types_for_fn_ty(\ @@ -863,9 +863,9 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty={} -> {}, \ ret_def={}", ty.repr(ccx.tcx()), - ccx.tn().types_to_str(llsig.llarg_tys.as_slice()), + ccx.tn().types_to_str(llsig.llarg_tys[]), ccx.tn().type_to_string(llsig.llret_ty), - ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::>().as_slice()), + ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::>()[]), ccx.tn().type_to_string(fn_ty.ret_ty.ty), llsig.ret_def); @@ -915,7 +915,7 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T if tys.fn_sig.0.variadic { Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty) } else { - Type::func(llargument_tys.as_slice(), &llreturn_ty) + Type::func(llargument_tys[], &llreturn_ty) } } diff --git a/src/librustc_trans/trans/glue.rs b/src/librustc_trans/trans/glue.rs index dea095ecaf594..26734d854afc0 100644 --- a/src/librustc_trans/trans/glue.rs +++ b/src/librustc_trans/trans/glue.rs @@ -160,7 +160,7 @@ pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Val let (glue, new_sym) = match 
ccx.available_drop_glues().borrow().get(&t) { Some(old_sym) => { - let glue = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil(ccx.tcx())); + let glue = decl_cdecl_fn(ccx, old_sym[], llfnty, ty::mk_nil(ccx.tcx())); (glue, None) }, None => { @@ -226,12 +226,12 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let fty = ty::lookup_item_type(bcx.tcx(), dtor_did).ty.subst(bcx.tcx(), substs); let self_ty = match fty.sty { - ty::ty_bare_fn(ref f) => { + ty::ty_bare_fn(_, ref f) => { assert!(f.sig.0.inputs.len() == 1); f.sig.0.inputs[0] } _ => bcx.sess().bug(format!("Expected function type, found {}", - bcx.ty_to_string(fty)).as_slice()) + bcx.ty_to_string(fty))[]) }; let (struct_data, info) = if ty::type_is_sized(bcx.tcx(), t) { @@ -289,6 +289,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } let dtor_ty = ty::mk_ctor_fn(bcx.tcx(), + class_did, &[get_drop_glue_type(bcx.ccx(), t)], ty::mk_nil(bcx.tcx())); let (_, variant_cx) = invoke(variant_cx, dtor_addr, args[], dtor_ty, None, false); @@ -350,7 +351,7 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: (Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u)) } _ => bcx.sess().bug(format!("Unexpected unsized type, found {}", - bcx.ty_to_string(t)).as_slice()) + bcx.ty_to_string(t))[]) } } @@ -422,7 +423,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>) bcx.sess().warn(format!("Ignoring drop flag in destructor for {}\ because the struct is unsized. See issue\ #16758", - bcx.ty_to_string(t)).as_slice()); + bcx.ty_to_string(t))[]); trans_struct_drop(bcx, t, v0, dtor, did, substs) } } @@ -504,7 +505,7 @@ pub fn declare_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) note_unique_llvm_symbol(ccx, name); let ty_name = token::intern_and_get_ident( - ppaux::ty_to_string(ccx.tcx(), t).as_slice()); + ppaux::ty_to_string(ccx.tcx(), t)[]); let ty_name = C_str_slice(ccx, ty_name); debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t)); @@ -523,8 +524,8 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, let fn_nm = mangle_internal_name_by_type_and_seq( ccx, t, - format!("glue_{}", name).as_slice()); - let llfn = decl_cdecl_fn(ccx, fn_nm.as_slice(), llfnty, ty::mk_nil(ccx.tcx())); + format!("glue_{}", name)[]); + let llfn = decl_cdecl_fn(ccx, fn_nm[], llfnty, ty::mk_nil(ccx.tcx())); note_unique_llvm_symbol(ccx, fn_nm.clone()); return (fn_nm, llfn); } diff --git a/src/librustc_trans/trans/intrinsic.rs b/src/librustc_trans/trans/intrinsic.rs index a6f7c849f4d95..fff89999d99b5 100644 --- a/src/librustc_trans/trans/intrinsic.rs +++ b/src/librustc_trans/trans/intrinsic.rs @@ -118,7 +118,7 @@ pub fn check_intrinsics(ccx: &CrateContext) { "" } else { "s" - }).as_slice()); + })[]); } if ty::type_is_fat_ptr(ccx.tcx(), transmute_restriction.to) || ty::type_is_fat_ptr(ccx.tcx(), transmute_restriction.from) { @@ -150,7 +150,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); let ret_ty = match callee_ty.sty { - ty::ty_bare_fn(ref f) => f.sig.0.output, + ty::ty_bare_fn(_, ref f) => f.sig.0.output, _ => panic!("expected bare_fn in trans_intrinsic_call") }; let foreign_item = tcx.map.expect_foreign_item(node); diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs index f1c3c9be396af..38c6b802d0485 100644 --- a/src/librustc_trans/trans/meth.rs +++ b/src/librustc_trans/trans/meth.rs @@ -77,7 +77,7 @@ pub fn trans_impl(ccx: &CrateContext, match 
*impl_item { ast::MethodImplItem(ref method) => { if method.pe_generics().ty_params.len() == 0u { - let trans_everywhere = attr::requests_inline(method.attrs.as_slice()); + let trans_everywhere = attr::requests_inline(method.attrs[]); for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) { let llfn = get_item_val(ccx, method.id); trans_fn(ccx, @@ -124,7 +124,7 @@ pub fn trans_method_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx: bcx, data: Fn(callee::trans_fn_ref(bcx, did, - MethodCall(method_call))), + MethodCallKey(method_call))), } } @@ -293,7 +293,7 @@ pub fn trans_static_method_callee(bcx: Block, _ => { bcx.tcx().sess.bug( format!("static call to invalid vtable: {}", - vtbl.repr(bcx.tcx())).as_slice()); + vtbl.repr(bcx.tcx()))[]); } } } @@ -344,12 +344,12 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // those from the impl and those from the method: let callee_substs = combine_impl_and_methods_tps( - bcx, MethodCall(method_call), vtable_impl.substs); + bcx, MethodCallKey(method_call), vtable_impl.substs); // translate the function let llfn = trans_fn_ref_with_substs(bcx, mth_id, - MethodCall(method_call), + MethodCallKey(method_call), callee_substs); Callee { bcx: bcx, data: Fn(llfn) } @@ -359,7 +359,7 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // after passing through fulfill_obligation let llfn = trans_fn_ref_with_substs(bcx, closure_def_id, - MethodCall(method_call), + MethodCallKey(method_call), substs); Callee { @@ -375,7 +375,7 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, traits::VtableParam(..) => { bcx.sess().bug( format!("resolved vtable bad vtable {} in trans", - vtable.repr(bcx.tcx())).as_slice()); + vtable.repr(bcx.tcx()))[]); } } } @@ -477,7 +477,7 @@ pub fn trans_trait_callee_from_llval<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, debug!("(translating trait callee) loading method"); // Replace the self type (&Self or Box) with an opaque pointer. 
let llcallee_ty = match callee_ty.sty { - ty::ty_bare_fn(ref f) if f.abi == Rust || f.abi == RustCall => { + ty::ty_bare_fn(_, ref f) if f.abi == Rust || f.abi == RustCall => { type_of_rust_fn(ccx, Some(Type::i8p(ccx)), f.sig.0.inputs.slice_from(1), @@ -566,7 +566,7 @@ pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.sess().bug( format!("resolved vtable for {} to bad vtable {} in trans", trait_ref.repr(bcx.tcx()), - vtable.repr(bcx.tcx())).as_slice()); + vtable.repr(bcx.tcx()))[]); } } }); @@ -598,7 +598,7 @@ pub fn make_vtable>(ccx: &CrateContext, let components: Vec<_> = head.into_iter().chain(ptrs).collect(); unsafe { - let tbl = C_struct(ccx, components.as_slice(), false); + let tbl = C_struct(ccx, components[], false); let sym = token::gensym("vtable"); let vt_gvar = format!("vtable{}", sym.uint()).with_c_str(|buf| { llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(), buf) @@ -639,7 +639,8 @@ fn emit_vtable_methods<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, m.repr(tcx), substs.repr(tcx)); if m.generics.has_type_params(subst::FnSpace) || - ty::type_has_self(ty::mk_bare_fn(tcx, m.fty.clone())) { + ty::type_has_self(ty::mk_bare_fn(tcx, None, m.fty.clone())) + { debug!("(making impl vtable) method has self or type \ params: {}", token::get_name(name)); diff --git a/src/librustc_trans/trans/monomorphize.rs b/src/librustc_trans/trans/monomorphize.rs index cb3c56ad2778e..2a6aff56513a1 100644 --- a/src/librustc_trans/trans/monomorphize.rs +++ b/src/librustc_trans/trans/monomorphize.rs @@ -122,7 +122,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, hash = format!("h{}", state.result()); ccx.tcx().map.with_path(fn_id.node, |path| { - exported_name(path, hash.as_slice()) + exported_name(path, hash[]) }) }; @@ -132,9 +132,9 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let mut hash_id = Some(hash_id); let mk_lldecl = |abi: abi::Abi| { let lldecl = if abi != abi::Rust { - foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.as_slice()) + foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s[]) } else { - decl_internal_rust_fn(ccx, mono_ty, s.as_slice()) + decl_internal_rust_fn(ccx, mono_ty, s[]) }; ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl); @@ -168,12 +168,12 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, .. 
} => { let d = mk_lldecl(abi); - let needs_body = setup_lldecl(d, i.attrs.as_slice()); + let needs_body = setup_lldecl(d, i.attrs[]); if needs_body { if abi != abi::Rust { foreign::trans_rust_fn_with_foreign_abi( ccx, &**decl, &**body, &[], d, psubsts, fn_id.node, - Some(hash.as_slice())); + Some(hash[])); } else { trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]); } @@ -197,7 +197,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, trans_enum_variant(ccx, parent, &*v, - args.as_slice(), + args[], this_tv.disr_val, psubsts, d); @@ -211,7 +211,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *ii { ast::MethodImplItem(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs.as_slice()); + let needs_body = setup_lldecl(d, mth.attrs[]); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), @@ -232,7 +232,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *method { ast::ProvidedMethod(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs.as_slice()); + let needs_body = setup_lldecl(d, mth.attrs[]); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d, psubsts, mth.id, &[]); @@ -241,7 +241,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } _ => { ccx.sess().bug(format!("can't monomorphize a {}", - map_node).as_slice()) + map_node)[]) } } } @@ -249,7 +249,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let d = mk_lldecl(abi::Rust); set_inline_hint(d); base::trans_tuple_struct(ccx, - struct_def.fields.as_slice(), + struct_def.fields[], struct_def.ctor_id.expect("ast-mapped tuple struct \ didn't have a ctor id"), psubsts, @@ -267,7 +267,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ast_map::NodePat(..) | ast_map::NodeLocal(..) 
=> { ccx.sess().bug(format!("can't monomorphize a {}", - map_node).as_slice()) + map_node)[]) } }; diff --git a/src/librustc_trans/trans/type_.rs b/src/librustc_trans/trans/type_.rs index 51a0533a7bb63..45a2a343066c4 100644 --- a/src/librustc_trans/trans/type_.rs +++ b/src/librustc_trans/trans/type_.rs @@ -102,7 +102,7 @@ impl Type { } pub fn int(ccx: &CrateContext) -> Type { - match ccx.tcx().sess.target.target.target_word_size.as_slice() { + match ccx.tcx().sess.target.target.target_word_size[] { "32" => Type::i32(ccx), "64" => Type::i64(ccx), tws => panic!("Unsupported target word size for int: {}", tws), diff --git a/src/librustc_trans/trans/type_of.rs b/src/librustc_trans/trans/type_of.rs index 2801e0ccead6d..499195b51b9ce 100644 --- a/src/librustc_trans/trans/type_of.rs +++ b/src/librustc_trans/trans/type_of.rs @@ -137,7 +137,7 @@ pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty)); atys.extend(input_tys); - Type::func(atys.as_slice(), &lloutputtype) + Type::func(atys[], &lloutputtype) } // Given a function type and a count of ty params, construct an llvm type @@ -150,7 +150,9 @@ pub fn type_of_fn_from_ty<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fty: Ty<'tcx>) f.sig.0.output, f.abi) } - ty::ty_bare_fn(ref f) => { + ty::ty_bare_fn(_, ref f) => { + // FIXME(#19925) once fn item types are + // zero-sized, we'll need to do something here if f.abi == abi::Rust || f.abi == abi::RustCall { type_of_rust_fn(cx, None, @@ -187,7 +189,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ let llsizingty = match t.sty { _ if !ty::lltype_is_sized(cx.tcx(), t) => { cx.sess().bug(format!("trying to take the sizing type of {}, an unsized type", - ppaux::ty_to_string(cx.tcx(), t)).as_slice()) + ppaux::ty_to_string(cx.tcx(), t))[]) } ty::ty_bool => Type::bool(cx), @@ -241,7 +243,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => { cx.sess().bug(format!("fictitious type {} in sizing_type_of()", - ppaux::ty_to_string(cx.tcx(), t)).as_slice()) + ppaux::ty_to_string(cx.tcx(), t))[]) } ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => panic!("unreachable") }; @@ -318,7 +320,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, an_enum, did, tps); - adt::incomplete_type_of(cx, &*repr, name.as_slice()) + adt::incomplete_type_of(cx, &*repr, name[]) } ty::ty_unboxed_closure(did, _, ref substs) => { // Only create the named struct, but don't fill it in. We @@ -329,7 +331,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { // contents of the VecPerParamSpace to to construct the llvm // name let name = llvm_type_name(cx, an_unboxed_closure, did, substs.types.as_slice()); - adt::incomplete_type_of(cx, &*repr, name.as_slice()) + adt::incomplete_type_of(cx, &*repr, name[]) } ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => { @@ -364,7 +366,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { ty::ty_str => Type::i8(cx), - ty::ty_bare_fn(_) => { + ty::ty_bare_fn(..) 
=> { type_of_fn_from_ty(cx, t).ptr_to() } ty::ty_closure(_) => { @@ -389,7 +391,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, a_struct, did, tps); - adt::incomplete_type_of(cx, &*repr, name.as_slice()) + adt::incomplete_type_of(cx, &*repr, name[]) } } @@ -408,7 +410,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { } ty::ty_trait(..) => Type::opaque_trait(cx), _ => cx.sess().bug(format!("ty_open with sized type: {}", - ppaux::ty_to_string(cx.tcx(), t)).as_slice()) + ppaux::ty_to_string(cx.tcx(), t))[]) }, ty::ty_infer(..) => cx.sess().bug("type_of with ty_infer"), diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index befb4bf81e5d0..ff577d2d45d4b 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -168,7 +168,7 @@ pub fn opt_ast_region_to_region<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( format!("`{}`", name) } else { format!("one of `{}`'s {} elided lifetimes", name, n) - }.as_slice()); + }[]); if len == 2 && i == 0 { m.push_str(" or "); @@ -323,7 +323,7 @@ fn create_substs_for_ast_path<'tcx,AC,RS>( format!("wrong number of type arguments: {} {}, found {}", expected, required_ty_param_count, - supplied_ty_param_count).as_slice()); + supplied_ty_param_count)[]); } else if supplied_ty_param_count > formal_ty_param_count { let expected = if required_ty_param_count < formal_ty_param_count { "expected at most" @@ -334,7 +334,7 @@ fn create_substs_for_ast_path<'tcx,AC,RS>( format!("wrong number of type arguments: {} {}, found {}", expected, formal_ty_param_count, - supplied_ty_param_count).as_slice()); + supplied_ty_param_count)[]); } if supplied_ty_param_count > required_ty_param_count @@ -723,7 +723,7 @@ pub fn ast_ty_to_builtin_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( .sess .span_bug(ast_ty.span, format!("unbound path {}", - path.repr(this.tcx())).as_slice()) + path.repr(this.tcx()))[]) } Some(&d) => d }; @@ -920,10 +920,10 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None) } ast::TyObjectSum(ref ty, ref bounds) => { - match ast_ty_to_trait_ref(this, rscope, &**ty, bounds.as_slice()) { + match ast_ty_to_trait_ref(this, rscope, &**ty, bounds[]) { Ok(trait_ref) => { trait_ref_to_object_type(this, rscope, ast_ty.span, - trait_ref, bounds.as_slice()) + trait_ref, bounds[]) } Err(ErrorReported) => { ty::mk_err() @@ -954,7 +954,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( tcx.sess.span_err(ast_ty.span, "variadic function must have C calling convention"); } - ty::mk_bare_fn(tcx, ty_of_bare_fn(this, bf.unsafety, bf.abi, &*bf.decl)) + ty::mk_bare_fn(tcx, None, ty_of_bare_fn(this, bf.unsafety, bf.abi, &*bf.decl)) } ast::TyClosure(ref f) => { // Use corresponding trait store to figure out default bounds @@ -977,7 +977,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( ty::mk_closure(tcx, fn_decl) } ast::TyPolyTraitRef(ref bounds) => { - conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds.as_slice()) + conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds[]) } ast::TyPath(ref path, id) => { let a_def = match tcx.def_map.borrow().get(&id) { @@ -985,7 +985,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( tcx.sess .span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx)).as_slice()) + path.repr(tcx))[]) } 
Some(&d) => d }; @@ -1019,7 +1019,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( def::DefMod(id) => { tcx.sess.span_fatal(ast_ty.span, format!("found module name used as a type: {}", - tcx.map.node_to_string(id.node)).as_slice()); + tcx.map.node_to_string(id.node))[]); } def::DefPrimTy(_) => { panic!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call"); @@ -1038,7 +1038,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( .last() .unwrap() .identifier) - .get()).as_slice()); + .get())[]); ty::mk_err() } def::DefAssociatedPath(typ, assoc_ident) => { @@ -1084,7 +1084,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( tcx.sess.span_fatal(ast_ty.span, format!("found value name used \ as a type: {}", - a_def).as_slice()); + a_def)[]); } } } @@ -1112,7 +1112,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( ast_ty.span, format!("expected constant expr for array \ length: {}", - *r).as_slice()); + *r)[]); } } } @@ -1235,7 +1235,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx, AC: AstConv<'tcx>>( let input_params = if self_ty.is_some() { decl.inputs.slice_from(1) } else { - decl.inputs.as_slice() + decl.inputs[] }; let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None)); let input_pats: Vec = input_params.iter() @@ -1437,11 +1437,8 @@ pub fn conv_existential_bounds<'tcx, AC: AstConv<'tcx>, RS:RegionScope>( ast_bounds: &[ast::TyParamBound]) -> ty::ExistentialBounds { - let ast_bound_refs: Vec<&ast::TyParamBound> = - ast_bounds.iter().collect(); - let partitioned_bounds = - partition_bounds(this.tcx(), span, ast_bound_refs.as_slice()); + partition_bounds(this.tcx(), span, ast_bounds); conv_existential_bounds_from_partitioned_bounds( this, rscope, span, principal_trait_ref, partitioned_bounds) @@ -1455,7 +1452,6 @@ fn conv_ty_poly_trait_ref<'tcx, AC, RS>( -> Ty<'tcx> where AC: AstConv<'tcx>, RS:RegionScope { - let ast_bounds: Vec<&ast::TyParamBound> = ast_bounds.iter().collect(); let mut partitioned_bounds = partition_bounds(this.tcx(), span, ast_bounds[]); let main_trait_bound = match partitioned_bounds.trait_bounds.remove(0) { @@ -1506,7 +1502,7 @@ pub fn conv_existential_bounds_from_partitioned_bounds<'tcx, AC, RS>( this.tcx().sess.span_err( b.trait_ref.path.span, format!("only the builtin traits can be used \ - as closure or object bounds").as_slice()); + as closure or object bounds")[]); } let region_bound = compute_region_bound(this, @@ -1576,7 +1572,7 @@ fn compute_opt_region_bound<'tcx>(tcx: &ty::ctxt<'tcx>, tcx.sess.span_err( span, format!("ambiguous lifetime bound, \ - explicit lifetime bound required").as_slice()); + explicit lifetime bound required")[]); } return Some(r); } @@ -1602,7 +1598,7 @@ fn compute_region_bound<'tcx, AC: AstConv<'tcx>, RS:RegionScope>( None => { this.tcx().sess.span_err( span, - format!("explicit lifetime bound required").as_slice()); + format!("explicit lifetime bound required")[]); ty::ReStatic } } @@ -1620,14 +1616,14 @@ pub struct PartitionedBounds<'a> { /// general trait bounds, and region bounds. 
pub fn partition_bounds<'a>(tcx: &ty::ctxt, _span: Span, - ast_bounds: &'a [&ast::TyParamBound]) + ast_bounds: &'a [ast::TyParamBound]) -> PartitionedBounds<'a> { let mut builtin_bounds = ty::empty_builtin_bounds(); let mut region_bounds = Vec::new(); let mut trait_bounds = Vec::new(); let mut trait_def_ids = DefIdMap::new(); - for &ast_bound in ast_bounds.iter() { + for ast_bound in ast_bounds.iter() { match *ast_bound { ast::TraitTyParamBound(ref b) => { match ::lookup_def_tcx(tcx, b.trait_ref.path.span, b.trait_ref.ref_id) { diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 44cc5fce53da3..35ffa8ace406f 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -21,7 +21,7 @@ use util::nodemap::FnvHashMap; use util::ppaux::Repr; use std::cmp; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::ast; use syntax::ast_util; use syntax::codemap::{Span, Spanned}; @@ -238,7 +238,8 @@ pub fn check_match<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &ast::Expr, discrim: &ast::Expr, arms: &[ast::Arm], - expected: Expectation<'tcx>) { + expected: Expectation<'tcx>, + match_src: ast::MatchSource) { let tcx = fcx.ccx.tcx; let discrim_ty = fcx.infcx().next_ty_var(); @@ -290,12 +291,27 @@ pub fn check_match<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, if ty::type_is_error(result_ty) || ty::type_is_error(bty) { ty::mk_err() } else { + let (origin, expected, found) = match match_src { + /* if-let construct without an else block */ + ast::MatchSource::IfLetDesugar { contains_else_clause } + if !contains_else_clause => ( + infer::IfExpressionWithNoElse(expr.span), + bty, + result_ty, + ), + _ => ( + infer::MatchExpressionArm(expr.span, arm.body.span), + result_ty, + bty, + ), + }; + infer::common_supertype( fcx.infcx(), - infer::MatchExpressionArm(expr.span, arm.body.span), - true, // result_ty is "expected" here - result_ty, - bty + origin, + true, + expected, + found, ) } }); diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index 2ade3040d6cff..0922605236707 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -261,44 +261,43 @@ fn check_boxed_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, // Find the expected input/output types (if any). Substitute // fresh bound regions for any bound regions we find in the // expected types so as to avoid capture. - let expected_sty = expected.map_to_option(fcx, |x| Some((*x).clone())); - let (expected_sig, - expected_onceness, - expected_bounds) = { - match expected_sty { - Some(ty::ty_closure(ref cenv)) => { - let (sig, _) = - ty::replace_late_bound_regions( - tcx, - &cenv.sig, - |_, debruijn| fcx.inh.infcx.fresh_bound_region(debruijn)); - let onceness = match (&store, &cenv.store) { - // As the closure type and onceness go, only three - // combinations are legit: - // once closure - // many closure - // once proc - // If the actual and expected closure type disagree with - // each other, set expected onceness to be always Once or - // Many according to the actual type. Otherwise, it will - // yield either an illegal "many proc" or a less known - // "once closure" in the error message. 
- (&ty::UniqTraitStore, &ty::UniqTraitStore) | - (&ty::RegionTraitStore(..), &ty::RegionTraitStore(..)) => - cenv.onceness, - (&ty::UniqTraitStore, _) => ast::Once, - (&ty::RegionTraitStore(..), _) => ast::Many, - }; - (Some(sig), onceness, cenv.bounds) - } - _ => { - // Not an error! Means we're inferring the closure type - let region = fcx.infcx().next_region_var( - infer::AddrOfRegion(expr.span)); - let bounds = ty::region_existential_bound(region); - let onceness = ast::Many; - (None, onceness, bounds) - } + let expected_cenv = expected.map_to_option(fcx, |ty| match ty.sty { + ty::ty_closure(ref cenv) => Some(cenv), + _ => None + }); + let (expected_sig, expected_onceness, expected_bounds) = match expected_cenv { + Some(cenv) => { + let (sig, _) = + ty::replace_late_bound_regions( + tcx, + &cenv.sig, + |_, debruijn| fcx.inh.infcx.fresh_bound_region(debruijn)); + let onceness = match (&store, &cenv.store) { + // As the closure type and onceness go, only three + // combinations are legit: + // once closure + // many closure + // once proc + // If the actual and expected closure type disagree with + // each other, set expected onceness to be always Once or + // Many according to the actual type. Otherwise, it will + // yield either an illegal "many proc" or a less known + // "once closure" in the error message. + (&ty::UniqTraitStore, &ty::UniqTraitStore) | + (&ty::RegionTraitStore(..), &ty::RegionTraitStore(..)) => + cenv.onceness, + (&ty::UniqTraitStore, _) => ast::Once, + (&ty::RegionTraitStore(..), _) => ast::Many, + }; + (Some(sig), onceness, cenv.bounds) + } + _ => { + // Not an error! Means we're inferring the closure type + let region = fcx.infcx().next_region_var( + infer::AddrOfRegion(expr.span)); + let bounds = ty::region_existential_bound(region); + let onceness = ast::Many; + (None, onceness, bounds) } }; diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index 2c220f298262f..d3b518ec2e3ab 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -113,7 +113,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self.add_obligations(&pick, &method_bounds_substs, &method_bounds); // Create the final `MethodCallee`. 
- let fty = ty::mk_bare_fn(self.tcx(), ty::BareFnTy { + let fty = ty::mk_bare_fn(self.tcx(), None, ty::BareFnTy { sig: ty::Binder(method_sig), unsafety: pick.method_ty.fty.unsafety, abi: pick.method_ty.fty.abi.clone(), @@ -466,7 +466,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { fn fixup_derefs_on_method_receiver_if_necessary(&self, method_callee: &MethodCallee) { let sig = match method_callee.ty.sty { - ty::ty_bare_fn(ref f) => f.sig.clone(), + ty::ty_bare_fn(_, ref f) => f.sig.clone(), ty::ty_closure(ref f) => f.sig.clone(), _ => return, }; diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index 3b7eb22e56cc4..19776318c876d 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -199,7 +199,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, infer::FnCall, &fn_sig).0; let transformed_self_ty = fn_sig.inputs[0]; - let fty = ty::mk_bare_fn(tcx, ty::BareFnTy { + let fty = ty::mk_bare_fn(tcx, None, ty::BareFnTy { sig: ty::Binder(fn_sig), unsafety: bare_fn_ty.unsafety, abi: bare_fn_ty.abi.clone(), @@ -269,7 +269,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, span, format!( "trait method is &self but first arg is: {}", - transformed_self_ty.repr(fcx.tcx())).as_slice()); + transformed_self_ty.repr(fcx.tcx()))[]); } } } @@ -279,7 +279,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, span, format!( "unexpected explicit self type in operator method: {}", - method_ty.explicit_self).as_slice()); + method_ty.explicit_self)[]); } } } @@ -333,7 +333,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, if is_field { cx.sess.span_note(span, format!("use `(s.{0})(...)` if you meant to call the \ - function stored in the `{0}` field", method_ustring).as_slice()); + function stored in the `{0}` field", method_ustring)[]); } if static_sources.len() > 0 { diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index b5776f9aeb34a..961b664e404ca 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -557,7 +557,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.tcx().sess.span_bug( self.span, format!("No entry for unboxed closure: {}", - closure_def_id.repr(self.tcx())).as_slice()); + closure_def_id.repr(self.tcx()))[]); } }; diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index bbc33826f3551..3139a17f9989e 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -399,7 +399,7 @@ fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let fty = fty.subst(ccx.tcx, ¶m_env.free_substs); match fty.sty { - ty::ty_bare_fn(ref fn_ty) => { + ty::ty_bare_fn(_, ref fn_ty) => { let inh = Inherited::new(ccx.tcx, param_env); let fcx = check_fn(ccx, fn_ty.unsafety, id, &fn_ty.sig, decl, id, body, &inh); @@ -518,7 +518,7 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, // The free region references will be bound the node_id of the body block. 
let fn_sig = liberate_late_bound_regions(tcx, CodeExtent::from_node_id(body.id), fn_sig); - let arg_tys = fn_sig.inputs.as_slice(); + let arg_tys = fn_sig.inputs[]; let ret_ty = fn_sig.output; debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})", @@ -616,7 +616,7 @@ pub fn check_item(ccx: &CrateCtxt, it: &ast::Item) { ast::ItemEnum(ref enum_definition, _) => { check_enum_variants(ccx, it.span, - enum_definition.variants.as_slice(), + enum_definition.variants[], it.id); } ast::ItemFn(ref decl, _, _, _, ref body) => { @@ -915,7 +915,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, but not in the trait", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &impl_m.explicit_self)).as_slice()); + &impl_m.explicit_self))[]); return; } (_, &ty::StaticExplicitSelfCategory) => { @@ -925,7 +925,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, but not in the impl", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &trait_m.explicit_self)).as_slice()); + &trait_m.explicit_self))[]); return; } _ => { @@ -1132,9 +1132,9 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, } // Compute skolemized form of impl and trait method tys. - let impl_fty = ty::mk_bare_fn(tcx, impl_m.fty.clone()); + let impl_fty = ty::mk_bare_fn(tcx, None, impl_m.fty.clone()); let impl_fty = impl_fty.subst(tcx, &impl_to_skol_substs); - let trait_fty = ty::mk_bare_fn(tcx, trait_m.fty.clone()); + let trait_fty = ty::mk_bare_fn(tcx, None, trait_m.fty.clone()); let trait_fty = trait_fty.subst(tcx, &trait_to_skol_substs); // Check the impl method type IM is a subtype of the trait method @@ -1229,7 +1229,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, span, format!("lifetime parameters or bounds on method `{}` do \ not match the trait declaration", - token::get_name(impl_m.name)).as_slice()); + token::get_name(impl_m.name))[]); return false; } @@ -1281,7 +1281,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, from its counterpart `{}` \ declared in the trait", impl_param.name.user_string(tcx), - trait_param.name.user_string(tcx)).as_slice()); + trait_param.name.user_string(tcx))[]); true } else { false @@ -1291,14 +1291,14 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, tcx.sess.span_note( span, format!("the impl is missing the following bounds: `{}`", - missing.user_string(tcx)).as_slice()); + missing.user_string(tcx))[]); } if extra.len() != 0 { tcx.sess.span_note( span, format!("the impl has the following extra bounds: `{}`", - extra.user_string(tcx)).as_slice()); + extra.user_string(tcx))[]); } if err { @@ -1389,6 +1389,8 @@ fn check_cast(fcx: &FnCtxt, }, t_e, None); } + let t_e_is_bare_fn_item = ty::type_is_bare_fn_item(t_e); + let t_1_is_scalar = ty::type_is_scalar(t_1); let t_1_is_char = ty::type_is_char(t_1); let t_1_is_bare_fn = ty::type_is_bare_fn(t_1); @@ -1396,7 +1398,9 @@ fn check_cast(fcx: &FnCtxt, // casts to scalars other than `char` and `bare fn` are trivial let t_1_is_trivial = t_1_is_scalar && !t_1_is_char && !t_1_is_bare_fn; - if ty::type_is_c_like_enum(fcx.tcx(), t_e) && t_1_is_trivial { + if t_e_is_bare_fn_item && t_1_is_bare_fn { + demand::coerce(fcx, e.span, t_1, &*e); + } else if ty::type_is_c_like_enum(fcx.tcx(), t_e) && t_1_is_trivial { if t_1_is_float || ty::type_is_unsafe_ptr(t_1) { fcx.type_error_message(span, |actual| { format!("illegal cast; cast through an \ @@ -1557,7 +1561,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.span_bug( span, format!("no type for local variable {}", - nid).as_slice()); + nid)[]); } } } @@ -1634,7 
+1638,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { span: Span, adj: &ty::AutoAdjustment<'tcx>) { match *adj { - ty::AdjustAddEnv(..) => { } + ty::AdjustAddEnv(..) | + ty::AdjustReifyFnPointer(..) => { + } ty::AdjustDerefRef(ref d_r) => { match d_r.autoref { Some(ref a_r) => { @@ -1805,7 +1811,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(&t) => t, None => { self.tcx().sess.bug(format!("no type for expr in fcx {}", - self.tag()).as_slice()); + self.tag())[]); } } } @@ -1835,7 +1841,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.bug( format!("no type for node {}: {} in fcx {}", id, self.tcx().map.node_to_string(id), - self.tag()).as_slice()); + self.tag())[]); } } } @@ -2042,8 +2048,8 @@ fn try_overloaded_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, -> bool { // Bail out if the callee is a bare function or a closure. We check those // manually. - match *structure_of(fcx, callee.span, callee_type) { - ty::ty_bare_fn(_) | ty::ty_closure(_) => return false, + match structurally_resolved_type(fcx, callee.span, callee_type).sty { + ty::ty_bare_fn(..) | ty::ty_closure(_) => return false, _ => {} } @@ -2392,7 +2398,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, Ok(trait_did) => trait_did, Err(ref err_string) => { fcx.tcx().sess.span_err(iterator_expr.span, - err_string.as_slice()); + err_string[]); return ty::mk_err() } }; @@ -2419,7 +2425,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, format!("`for` loop expression has type `{}` which does \ not implement the `Iterator` trait; \ maybe try .iter()", - ty_string).as_slice()); + ty_string)[]); } ty::mk_err() } @@ -2457,7 +2463,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, format!("`next` method of the `Iterator` \ trait has an unexpected type `{}`", fcx.infcx().ty_to_string(return_type)) - .as_slice()); + []); ty::mk_err() } } @@ -2484,7 +2490,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, check_argument_types(fcx, sp, - err_inputs.as_slice(), + err_inputs[], callee_expr, args_no_rcvr, autoref_args, @@ -2493,7 +2499,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, ty::FnConverging(ty::mk_err()) } else { match method_fn_ty.sty { - ty::ty_bare_fn(ref fty) => { + ty::ty_bare_fn(_, ref fty) => { // HACK(eddyb) ignore self in the definition (see above). check_argument_types(fcx, sp, @@ -2717,10 +2723,9 @@ fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, ast::LitInt(_, ast::SignedIntLit(t, _)) => ty::mk_mach_int(t), ast::LitInt(_, ast::UnsignedIntLit(t)) => ty::mk_mach_uint(t), ast::LitInt(_, ast::UnsuffixedIntLit(_)) => { - let opt_ty = expected.map_to_option(fcx, |sty| { - match *sty { - ty::ty_int(i) => Some(ty::mk_mach_int(i)), - ty::ty_uint(i) => Some(ty::mk_mach_uint(i)), + let opt_ty = expected.map_to_option(fcx, |ty| { + match ty.sty { + ty::ty_int(_) | ty::ty_uint(_) => Some(ty), ty::ty_char => Some(ty::mk_mach_uint(ast::TyU8)), ty::ty_ptr(..) => Some(ty::mk_mach_uint(ast::TyU)), ty::ty_bare_fn(..) 
=> Some(ty::mk_mach_uint(ast::TyU)), @@ -2732,9 +2737,9 @@ fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } ast::LitFloat(_, t) => ty::mk_mach_float(t), ast::LitFloatUnsuffixed(_) => { - let opt_ty = expected.map_to_option(fcx, |sty| { - match *sty { - ty::ty_float(i) => Some(ty::mk_mach_float(i)), + let opt_ty = expected.map_to_option(fcx, |ty| { + match ty.sty { + ty::ty_float(_) => Some(ty), _ => None } }); @@ -2910,7 +2915,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let fn_ty = fcx.expr_ty(f); // Extract the function signature from `in_fty`. - let fn_sty = structure_of(fcx, f.span, fn_ty); + let fn_ty = structurally_resolved_type(fcx, f.span, fn_ty); // This is the "default" function signature, used in case of error. // In that case, we check each argument against "error" in order to @@ -2921,8 +2926,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, variadic: false }); - let fn_sig = match *fn_sty { - ty::ty_bare_fn(ty::BareFnTy {ref sig, ..}) | + let fn_sig = match fn_ty.sty { + ty::ty_bare_fn(_, ty::BareFnTy {ref sig, ..}) | ty::ty_closure(box ty::ClosureTy {ref sig, ..}) => sig, _ => { fcx.type_error_message(call_expr.span, |actual| { @@ -2942,7 +2947,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, // Call the generic checker. check_argument_types(fcx, call_expr.span, - fn_sig.inputs.as_slice(), + fn_sig.inputs[], f, args, AutorefArgs::No, @@ -3307,7 +3312,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, ty::ty_struct(base_id, ref substs) => { debug!("struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_field_ty(tcx, base_id, fields.as_slice(), + lookup_field_ty(tcx, base_id, fields[], field.node.name, &(*substs)) } _ => None @@ -3370,7 +3375,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, if tuple_like { debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_tup_field_ty(tcx, base_id, fields.as_slice(), + lookup_tup_field_ty(tcx, base_id, fields[], idx.node, &(*substs)) } else { None @@ -3419,7 +3424,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, substitutions: subst::Substs<'tcx>, field_types: &[ty::field_ty], ast_fields: &[ast::Field], - check_completeness: bool) { + check_completeness: bool, + enum_id_opt: Option) { let tcx = fcx.ccx.tcx; let mut class_field_map = FnvHashMap::new(); @@ -3438,13 +3444,24 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, match pair { None => { fcx.type_error_message( - field.ident.span, - |actual| { - format!("structure `{}` has no field named `{}`", - actual, token::get_ident(field.ident.node)) - }, - struct_ty, - None); + field.ident.span, + |actual| match enum_id_opt { + Some(enum_id) => { + let variant_type = ty::enum_variant_with_id(tcx, + enum_id, + class_id); + format!("struct variant `{}::{}` has no field named `{}`", + actual, variant_type.name.as_str(), + token::get_ident(field.ident.node)) + } + None => { + format!("structure `{}` has no field named `{}`", + actual, + token::get_ident(field.ident.node)) + } + }, + struct_ty, + None); error_happened = true; } Some((_, true)) => { @@ -3523,9 +3540,10 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, class_id, id, struct_substs, - class_fields.as_slice(), + class_fields[], fields, - base_expr.is_none()); + base_expr.is_none(), + None); if ty::type_is_error(fcx.node_ty(id)) { struct_type = 
ty::mk_err(); } @@ -3565,9 +3583,10 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, variant_id, id, substitutions, - variant_fields.as_slice(), + variant_fields[], fields, - true); + true, + Some(enum_id)); fcx.write_ty(id, enum_type); } @@ -3655,9 +3674,9 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, } } ast::ExprUnary(unop, ref oprnd) => { - let expected_inner = expected.map(fcx, |sty| { + let expected_inner = expected.map(fcx, |ty| { match unop { - ast::UnUniq => match *sty { + ast::UnUniq => match ty.sty { ty::ty_uniq(ty) => { ExpectHasType(ty) } @@ -3746,9 +3765,11 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, } ast::ExprAddrOf(mutbl, ref oprnd) => { let expected = expected.only_has_type(); - let hint = expected.map(fcx, |sty| { - match *sty { ty::ty_rptr(_, ref mt) | ty::ty_ptr(ref mt) => ExpectHasType(mt.ty), - _ => NoExpectation } + let hint = expected.map(fcx, |ty| { + match ty.sty { + ty::ty_rptr(_, ref mt) | ty::ty_ptr(ref mt) => ExpectHasType(mt.ty), + _ => NoExpectation + } }); let lvalue_pref = match mutbl { ast::MutMutable => PreferMutLvalue, @@ -3860,7 +3881,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, } let lhs_ty = fcx.expr_ty(&**lhs); - check_expr_has_type(fcx, &**rhs, lhs_ty); + check_expr_coercable_to_type(fcx, &**rhs, lhs_ty); let rhs_ty = fcx.expr_ty(&**rhs); fcx.require_expr_have_sized_type(&**lhs, traits::AssignmentLhsSized); @@ -3918,8 +3939,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, fcx.write_nil(id); } } - ast::ExprMatch(ref discrim, ref arms, _) => { - _match::check_match(fcx, expr, &**discrim, arms.as_slice(), expected); + ast::ExprMatch(ref discrim, ref arms, match_src) => { + _match::check_match(fcx, expr, &**discrim, arms.as_slice(), expected, match_src); } ast::ExprClosure(_, opt_kind, ref decl, ref body) => { closure::check_expr_closure(fcx, expr, opt_kind, &**decl, &**body, expected); @@ -3935,8 +3956,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let f_ty = fcx.expr_ty(&**f); let args: Vec<_> = args.iter().map(|x| x).collect(); - if !try_overloaded_call(fcx, expr, &**f, f_ty, args.as_slice()) { - check_call(fcx, expr, &**f, args.as_slice()); + if !try_overloaded_call(fcx, expr, &**f, f_ty, args[]) { + check_call(fcx, expr, &**f, args[]); let args_err = args.iter().fold(false, |rest_err, a| { // is this not working? 
@@ -3948,7 +3969,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, } } ast::ExprMethodCall(ident, ref tps, ref args) => { - check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice(), lvalue_pref); + check_method_call(fcx, expr, ident, args[], tps[], lvalue_pref); let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a)); let args_err = arg_tys.fold(false, |rest_err, a| { @@ -4037,9 +4058,9 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, } ast::ExprTup(ref elts) => { let expected = expected.only_has_type(); - let flds = expected.map_to_option(fcx, |sty| { - match *sty { - ty::ty_tup(ref flds) => Some((*flds).clone()), + let flds = expected.map_to_option(fcx, |ty| { + match ty.sty { + ty::ty_tup(ref flds) => Some(flds[]), _ => None } }); @@ -4073,7 +4094,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let struct_id = match def { Some(def::DefVariant(enum_id, variant_id, true)) => { check_struct_enum_variant(fcx, id, expr.span, enum_id, - variant_id, fields.as_slice()); + variant_id, fields[]); enum_id } Some(def::DefTrait(def_id)) => { @@ -4082,7 +4103,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields.as_slice(), + fields[], base_expr); def_id }, @@ -4095,7 +4116,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, id, expr.span, struct_did, - fields.as_slice(), + fields[], base_expr.as_ref().map(|e| &**e)); } _ => { @@ -4104,7 +4125,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields.as_slice(), + fields[], base_expr); } } @@ -4145,7 +4166,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, fcx.infcx() .ty_to_string( actual_structure_type), - type_error_description).as_slice()); + type_error_description)[]); ty::note_and_explain_type_err(tcx, &type_error); } } @@ -4304,20 +4325,20 @@ impl<'tcx> Expectation<'tcx> { } fn map<'a, F>(self, fcx: &FnCtxt<'a, 'tcx>, unpack: F) -> Expectation<'tcx> where - F: FnOnce(&ty::sty<'tcx>) -> Expectation<'tcx> + F: FnOnce(Ty<'tcx>) -> Expectation<'tcx> { match self.resolve(fcx) { NoExpectation => NoExpectation, - ExpectCastableToType(t) | ExpectHasType(t) => unpack(&t.sty), + ExpectCastableToType(ty) | ExpectHasType(ty) => unpack(ty), } } fn map_to_option<'a, O, F>(self, fcx: &FnCtxt<'a, 'tcx>, unpack: F) -> Option where - F: FnOnce(&ty::sty<'tcx>) -> Option, + F: FnOnce(Ty<'tcx>) -> Option, { match self.resolve(fcx) { NoExpectation => None, - ExpectCastableToType(t) | ExpectHasType(t) => unpack(&t.sty), + ExpectCastableToType(ty) | ExpectHasType(ty) => unpack(ty), } } } @@ -4754,7 +4775,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, } let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id }) - .as_slice().get(0).unwrap_or(&attr::ReprAny); + [].get(0).unwrap_or(&attr::ReprAny); if hint != attr::ReprAny && vs.len() <= 1 { if vs.len() == 1 { @@ -5320,12 +5341,6 @@ pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, sp: Span, ty } -// Returns the one-level-deep structure of the given type. 
-pub fn structure_of<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, sp: Span, typ: Ty<'tcx>) - -> &'tcx ty::sty<'tcx> { - &structurally_resolved_type(fcx, sp, typ).sty -} - // Returns true if b contains a break that can exit from b pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: &ast::Block) -> bool { // First: is there an unlabeled break immediately @@ -5443,7 +5458,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { "get_tydesc" => { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { Ok(t) => t, - Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); } + Err(s) => { tcx.sess.span_fatal(it.span, s[]); } }; let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { ty: tydesc_ty, @@ -5459,7 +5474,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { ty::mk_struct(ccx.tcx, did, subst::Substs::empty())), Err(msg) => { - tcx.sess.span_fatal(it.span, msg.as_slice()); + tcx.sess.span_fatal(it.span, msg[]); } } }, @@ -5632,7 +5647,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { }; (n_tps, inputs, ty::FnConverging(output)) }; - let fty = ty::mk_bare_fn(tcx, ty::BareFnTy { + let fty = ty::mk_bare_fn(tcx, None, ty::BareFnTy { unsafety: ast::Unsafety::Unsafe, abi: abi::RustIntrinsic, sig: ty::Binder(FnSig { diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index 33c015a9a081c..22502c0dd1a1f 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -135,7 +135,7 @@ use syntax::visit; use syntax::visit::Visitor; use std::cell::{RefCell}; -use std::collections::hash_map::{Vacant, Occupied}; +use std::collections::hash_map::Entry::{Vacant, Occupied}; use self::RepeatingScope::Repeating; use self::SubjectNode::Subject; @@ -251,7 +251,7 @@ fn region_of_def(fcx: &FnCtxt, def: def::Def) -> ty::Region { } _ => { tcx.sess.bug(format!("unexpected def in region_of_def: {}", - def).as_slice()) + def)[]) } } } @@ -345,13 +345,13 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { Some(f) => f, None => { self.tcx().sess.bug( - format!("No fn-sig entry for id={}", id).as_slice()); + format!("No fn-sig entry for id={}", id)[]); } }; let len = self.region_param_pairs.len(); - self.relate_free_regions(fn_sig.as_slice(), body.id); - link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs.as_slice()); + self.relate_free_regions(fn_sig[], body.id); + link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs[]); self.visit_block(body); self.visit_region_obligations(body.id); self.region_param_pairs.truncate(len); @@ -738,7 +738,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { } ast::ExprMatch(ref discr, ref arms, _) => { - link_match(rcx, &**discr, arms.as_slice()); + link_match(rcx, &**discr, arms[]); visit::walk_expr(rcx, expr); } @@ -1186,7 +1186,7 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => rcx.tcx().sess.span_bug(deref_expr.span, format!("bad overloaded deref type {}", - method.ty.repr(rcx.tcx())).as_slice()) + method.ty.repr(rcx.tcx()))[]) }; { let mc = mc::MemCategorizationContext::new(rcx); @@ -1560,7 +1560,7 @@ fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, span, format!("Illegal upvar id: {}", upvar_id.repr( - rcx.tcx())).as_slice()); + rcx.tcx()))[]); } } } diff --git a/src/librustc_typeck/check/regionmanip.rs b/src/librustc_typeck/check/regionmanip.rs index 112ad1fb5b9b8..eaf638e388e35 100644 --- a/src/librustc_typeck/check/regionmanip.rs +++ b/src/librustc_typeck/check/regionmanip.rs @@ -138,7 +138,7 
@@ impl<'a, 'tcx> Wf<'a, 'tcx> { ty::ty_open(_) => { self.tcx.sess.bug( format!("Unexpected type encountered while doing wf check: {}", - ty.repr(self.tcx)).as_slice()); + ty.repr(self.tcx))[]); } } } diff --git a/src/librustc_typeck/check/vtable.rs b/src/librustc_typeck/check/vtable.rs index 4db795a1fda55..e23bf46b564b3 100644 --- a/src/librustc_typeck/check/vtable.rs +++ b/src/librustc_typeck/check/vtable.rs @@ -77,7 +77,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, source_expr.span, format!("can only cast an boxed pointer \ to a boxed object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty)).as_slice()); + ty::ty_sort_string(fcx.tcx(), source_ty))[]); } (_, &ty::ty_rptr(..)) => { @@ -85,7 +85,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, source_expr.span, format!("can only cast a &-pointer \ to an &-object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty)).as_slice()); + ty::ty_sort_string(fcx.tcx(), source_ty))[]); } _ => { @@ -164,7 +164,7 @@ fn check_object_safety_inner<'tcx>(tcx: &ty::ctxt<'tcx>, trait_name); for msg in errors { - tcx.sess.note(msg.as_slice()); + tcx.sess.note(msg[]); } } @@ -455,7 +455,7 @@ pub fn maybe_report_ambiguity<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, format!( "unable to infer enough type information about `{}`; type annotations \ required", - self_ty.user_string(fcx.tcx())).as_slice()); + self_ty.user_string(fcx.tcx()))[]); } else { fcx.tcx().sess.span_err( obligation.cause.span, @@ -464,7 +464,7 @@ pub fn maybe_report_ambiguity<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, locate the impl of the trait `{}` for \ the type `{}`; type annotations required", trait_ref.user_string(fcx.tcx()), - self_ty.user_string(fcx.tcx())).as_slice()); + self_ty.user_string(fcx.tcx()))[]); note_obligation_cause(fcx, obligation); } } @@ -477,7 +477,7 @@ pub fn maybe_report_ambiguity<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, cannot locate the impl of the trait `{}` for \ the type `{}`", trait_ref.user_string(fcx.tcx()), - self_ty.user_string(fcx.tcx())).as_slice()); + self_ty.user_string(fcx.tcx()))[]); } } diff --git a/src/librustc_typeck/check/wf.rs b/src/librustc_typeck/check/wf.rs index c09ce3db6ddd2..a2fb44fff796f 100644 --- a/src/librustc_typeck/check/wf.rs +++ b/src/librustc_typeck/check/wf.rs @@ -17,7 +17,7 @@ use middle::subst::{Subst}; use middle::traits; use middle::ty::{mod, Ty}; use middle::ty::liberate_late_bound_regions; -use middle::ty_fold::{TypeFolder, TypeFoldable}; +use middle::ty_fold::{TypeFolder, TypeFoldable, super_fold_ty}; use util::ppaux::Repr; use std::collections::HashSet; @@ -355,7 +355,7 @@ impl<'cx,'tcx> TypeFolder<'tcx> for BoundsChecker<'cx,'tcx> { self.fold_substs(substs); } - ty::ty_bare_fn(ty::BareFnTy{sig: ref fn_sig, ..}) | + ty::ty_bare_fn(_, ty::BareFnTy{sig: ref fn_sig, ..}) | ty::ty_closure(box ty::ClosureTy{sig: ref fn_sig, ..}) => { self.binding_count += 1; @@ -368,8 +368,8 @@ impl<'cx,'tcx> TypeFolder<'tcx> for BoundsChecker<'cx,'tcx> { self.binding_count -= 1; } - ref sty => { - self.fold_sty(sty); + _ => { + super_fold_ty(self, t); } } diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 700d12116060c..b123d97d8970c 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -15,7 +15,6 @@ use self::ResolveReason::*; use astconv::AstConv; use check::FnCtxt; -use middle::def; use middle::pat_util; use middle::ty::{mod, Ty, MethodCall, MethodCallee}; use middle::ty_fold::{TypeFolder,TypeFoldable}; @@ -267,25 +266,12 @@ 
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { Some(adjustment) => { let adj_object = ty::adjust_is_object(&adjustment); let resolved_adjustment = match adjustment { - ty::AdjustAddEnv(store) => { - // FIXME(eddyb) #2190 Allow only statically resolved - // bare functions to coerce to a closure to avoid - // constructing (slower) indirect call wrappers. - match self.tcx().def_map.borrow().get(&id) { - Some(&def::DefFn(..)) | - Some(&def::DefStaticMethod(..)) | - Some(&def::DefVariant(..)) | - Some(&def::DefStruct(_)) => { - } - _ => { - span_err!(self.tcx().sess, reason.span(self.tcx()), E0100, - "cannot coerce non-statically resolved bare fn to closure"); - span_help!(self.tcx().sess, reason.span(self.tcx()), - "consider embedding the function in a closure"); - } - } + ty::AdjustAddEnv(def_id, store) => { + ty::AdjustAddEnv(def_id, self.resolve(&store, reason)) + } - ty::AdjustAddEnv(self.resolve(&store, reason)) + ty::AdjustReifyFnPointer(def_id) => { + ty::AdjustReifyFnPointer(def_id) } ty::AdjustDerefRef(adj) => { diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 5d0bb6622c2e1..b08db39c18984 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -235,7 +235,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { // impl, plus its own. let new_polytype = ty::Polytype { generics: new_method_ty.generics.clone(), - ty: ty::mk_bare_fn(tcx, new_method_ty.fty.clone()) + ty: ty::mk_bare_fn(tcx, Some(new_did), new_method_ty.fty.clone()) }; debug!("new_polytype={}", new_polytype.repr(tcx)); @@ -488,7 +488,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { format!("the trait `Copy` may not be \ implemented for this type; field \ `{}` does not implement `Copy`", - token::get_name(name)).as_slice()) + token::get_name(name))[]) } Err(ty::VariantDoesNotImplementCopy(name)) => { tcx.sess @@ -496,7 +496,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { format!("the trait `Copy` may not be \ implemented for this type; variant \ `{}` does not implement `Copy`", - token::get_name(name)).as_slice()) + token::get_name(name))[]) } Err(ty::TypeIsStructural) => { tcx.sess diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 4612acb04b2f0..8380ed349cb3d 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -171,7 +171,7 @@ impl<'a, 'tcx> AstConv<'tcx> for CrateCtxt<'a, 'tcx> { x => { self.tcx.sess.bug(format!("unexpected sort of node \ in get_item_ty(): {}", - x).as_slice()); + x)[]); } } } @@ -211,13 +211,15 @@ pub fn get_enum_variant_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // Create a set of parameter types shared among all the variants. for variant in variants.iter() { + let variant_def_id = local_def(variant.node.id); + // Nullary enum constructors get turned into constants; n-ary enum // constructors get turned into functions. 
let result_ty = match variant.node.kind { ast::TupleVariantKind(ref args) if args.len() > 0 => { let rs = ExplicitRscope; let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect(); - ty::mk_ctor_fn(tcx, input_tys.as_slice(), enum_ty) + ty::mk_ctor_fn(tcx, variant_def_id, input_tys[], enum_ty) } ast::TupleVariantKind(_) => { @@ -246,7 +248,7 @@ pub fn get_enum_variant_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ty: result_ty }; - tcx.tcache.borrow_mut().insert(local_def(variant.node.id), pty); + tcx.tcache.borrow_mut().insert(variant_def_id, pty); write_ty_to_tcx(tcx, variant.node.id, result_ty); } @@ -270,7 +272,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items.as_slice(), + trait_items[], &m.id, &m.ident.name, &m.explicit_self, @@ -284,7 +286,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items.as_slice(), + trait_items[], &m.id, &m.pe_ident().name, m.pe_explicit_self(), @@ -353,7 +355,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, m.def_id, Polytype { generics: m.generics.clone(), - ty: ty::mk_bare_fn(ccx.tcx, m.fty.clone()) }); + ty: ty::mk_bare_fn(ccx.tcx, Some(m.def_id), m.fty.clone()) }); } fn ty_method_of_trait_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, @@ -379,7 +381,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let tmcx = TraitMethodCtxt { ccx: ccx, trait_id: local_def(trait_id), - trait_items: trait_items.as_slice(), + trait_items: trait_items[], method_generics: &ty_generics, }; let trait_self_ty = ty::mk_self_type(tmcx.tcx(), @@ -519,6 +521,7 @@ fn convert_methods<'a,'tcx,'i,I>(ccx: &CrateCtxt<'a, 'tcx>, tcx.sess.span_err(m.span, "duplicate method in trait impl"); } + let m_def_id = local_def(m.id); let mty = Rc::new(ty_of_method(ccx, convert_method_context, container, @@ -526,13 +529,13 @@ fn convert_methods<'a,'tcx,'i,I>(ccx: &CrateCtxt<'a, 'tcx>, untransformed_rcvr_ty, rcvr_ty_generics, rcvr_visibility)); - let fty = ty::mk_bare_fn(tcx, mty.fty.clone()); + let fty = ty::mk_bare_fn(tcx, Some(m_def_id), mty.fty.clone()); debug!("method {} (id {}) has type {}", m.pe_ident().repr(tcx), m.id, fty.repr(tcx)); tcx.tcache.borrow_mut().insert( - local_def(m.id), + m_def_id, Polytype { generics: mty.generics.clone(), ty: fty @@ -1040,7 +1043,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { write_ty_to_tcx(tcx, it.id, pty.ty); get_enum_variant_types(ccx, pty.ty, - enum_definition.variants.as_slice(), + enum_definition.variants[], generics); }, ast::ItemImpl(_, @@ -1086,7 +1089,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { ast_trait_ref.ref_id).def_id()) } }, - impl_items: impl_items.as_slice(), + impl_items: impl_items[], impl_generics: &ty_generics, }; @@ -1184,7 +1187,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { local_def(it.id)); let convert_method_context = TraitConvertMethodContext(local_def(it.id), - trait_methods.as_slice()); + trait_methods[]); convert_methods(ccx, convert_method_context, TraitContainer(local_def(it.id)), @@ -1279,7 +1282,8 @@ pub fn convert_struct<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, |field| (*tcx.tcache.borrow())[ local_def(field.node.id)].ty).collect(); let ctor_fn_ty = ty::mk_ctor_fn(tcx, - inputs.as_slice(), + local_def(ctor_id), + inputs[], selfty); write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty); tcx.tcache.borrow_mut().insert(local_def(ctor_id), @@ -1320,7 +1324,7 @@ fn get_trait_def<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ast_map::NodeItem(item) 
=> trait_def_of_item(ccx, &*item), _ => { ccx.tcx.sess.bug(format!("get_trait_def({}): not an item", - trait_id.node).as_slice()) + trait_id.node)[]) } } } @@ -1345,7 +1349,7 @@ pub fn trait_def_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ref s => { tcx.sess.span_bug( it.span, - format!("trait_def_of_item invoked on {}", s).as_slice()); + format!("trait_def_of_item invoked on {}", s)[]); } }; @@ -1364,8 +1368,7 @@ pub fn trait_def_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, self_param_ty, bounds.as_slice(), unbound, - it.span, - &generics.where_clause); + it.span); let substs = mk_item_substs(ccx, &ty_generics); let trait_def = Rc::new(ty::TraitDef { @@ -1462,7 +1465,7 @@ pub fn ty_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, it: &ast::Item) }; let pty = Polytype { generics: ty_generics, - ty: ty::mk_bare_fn(ccx.tcx, tofd) + ty: ty::mk_bare_fn(ccx.tcx, Some(local_def(it.id)), tofd) }; debug!("type of {} (id {}) is {}", token::get_ident(it.ident), @@ -1586,8 +1589,8 @@ fn ty_generics_for_type_or_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, -> ty::Generics<'tcx> { ty_generics(ccx, subst::TypeSpace, - generics.lifetimes.as_slice(), - generics.ty_params.as_slice(), + generics.lifetimes[], + generics.ty_params[], ty::Generics::empty(), &generics.where_clause, create_type_parameters_for_associated_types) @@ -1603,8 +1606,8 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let mut generics = ty_generics(ccx, subst::TypeSpace, - ast_generics.lifetimes.as_slice(), - ast_generics.ty_params.as_slice(), + ast_generics.lifetimes[], + ast_generics.ty_params[], ty::Generics::empty(), &ast_generics.where_clause, DontCreateTypeParametersForAssociatedTypes); @@ -1619,7 +1622,6 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, subst::AssocSpace, &associated_type.ty_param, generics.types.len(subst::AssocSpace), - &ast_generics.where_clause, Some(local_def(trait_id))); ccx.tcx.ty_param_defs.borrow_mut().insert(associated_type.ty_param.id, def.clone()); @@ -1674,8 +1676,8 @@ fn ty_generics_for_fn_or_method<'tcx,AC>( let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics); ty_generics(this, subst::FnSpace, - early_lifetimes.as_slice(), - generics.ty_params.as_slice(), + early_lifetimes[], + generics.ty_params[], base_generics, &generics.where_clause, create_type_parameters_for_associated_types) @@ -1703,7 +1705,7 @@ fn add_unsized_bound<'tcx,AC>(this: &AC, a default. \ Only `Sized?` is \ supported", - desc).as_slice()); + desc)[]); ty::try_add_builtin_trait(this.tcx(), kind_id, bounds); @@ -1774,7 +1776,6 @@ fn ty_generics<'tcx,AC>(this: &AC, space, param, i, - where_clause, None); debug!("ty_generics: def for type param: {}, {}", def.repr(this.tcx()), @@ -1798,6 +1799,54 @@ fn ty_generics<'tcx,AC>(this: &AC, // into the predicates list. This is currently kind of non-DRY. create_predicates(this.tcx(), &mut result, space); + // Add the bounds not associated with a type parameter + for predicate in where_clause.predicates.iter() { + match predicate { + &ast::WherePredicate::BoundPredicate(ref bound_pred) => { + let ty = ast_ty_to_ty(this, &ExplicitRscope, &*bound_pred.bounded_ty); + + for bound in bound_pred.bounds.iter() { + match bound { + &ast::TyParamBound::TraitTyParamBound(ref poly_trait_ref) => { + let trait_ref = astconv::instantiate_poly_trait_ref( + this, + &ExplicitRscope, + //@jroesch: for now trait_ref, poly_trait_ref? 
+ poly_trait_ref, + Some(ty), + AllowEqConstraints::Allow + ); + + result.predicates.push(space, ty::Predicate::Trait(trait_ref)); + } + + &ast::TyParamBound::RegionTyParamBound(ref lifetime) => { + let region = ast_region_to_region(this.tcx(), lifetime); + let pred = ty::Binder(ty::OutlivesPredicate(ty, region)); + result.predicates.push(space, ty::Predicate::TypeOutlives(pred)) + } + } + } + } + + &ast::WherePredicate::RegionPredicate(ref region_pred) => { + let r1 = ast_region_to_region(this.tcx(), ®ion_pred.lifetime); + for bound in region_pred.bounds.iter() { + let r2 = ast_region_to_region(this.tcx(), bound); + let pred = ty::Binder(ty::OutlivesPredicate(r1, r2)); + result.predicates.push(space, ty::Predicate::RegionOutlives(pred)) + } + } + + &ast::WherePredicate::EqPredicate(ref eq_pred) => { + // FIXME(#20041) + this.tcx().sess.span_bug(eq_pred.span, + "Equality constraints are not yet \ + implemented (#20041)") + } + } + } + return result; fn create_type_parameters_for_associated_types<'tcx, AC>( @@ -1915,7 +1964,6 @@ fn get_or_create_type_parameter_def<'tcx,AC>(this: &AC, space: subst::ParamSpace, param: &ast::TyParam, index: uint, - where_clause: &ast::WhereClause, associated_with: Option) -> ty::TypeParameterDef<'tcx> where AC: AstConv<'tcx> @@ -1929,10 +1977,9 @@ fn get_or_create_type_parameter_def<'tcx,AC>(this: &AC, let bounds = compute_bounds(this, param.ident.name, param_ty, - param.bounds.as_slice(), + param.bounds[], ¶m.unbound, - param.span, - where_clause); + param.span); let default = match param.default { None => None, Some(ref path) => { @@ -1977,15 +2024,13 @@ fn compute_bounds<'tcx,AC>(this: &AC, param_ty: ty::ParamTy, ast_bounds: &[ast::TyParamBound], unbound: &Option, - span: Span, - where_clause: &ast::WhereClause) + span: Span) -> ty::ParamBounds<'tcx> where AC: AstConv<'tcx> { let mut param_bounds = conv_param_bounds(this, span, param_ty, - ast_bounds, - where_clause); + ast_bounds); add_unsized_bound(this, @@ -2013,7 +2058,7 @@ fn check_bounds_compatible<'tcx>(tcx: &ty::ctxt<'tcx>, if !param_bounds.builtin_bounds.contains(&ty::BoundSized) { ty::each_bound_trait_and_supertraits( tcx, - param_bounds.trait_bounds.as_slice(), + param_bounds.trait_bounds[], |trait_ref| { let trait_def = ty::lookup_trait_def(tcx, trait_ref.def_id()); if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) { @@ -2031,16 +2076,14 @@ fn check_bounds_compatible<'tcx>(tcx: &ty::ctxt<'tcx>, fn conv_param_bounds<'tcx,AC>(this: &AC, span: Span, param_ty: ty::ParamTy, - ast_bounds: &[ast::TyParamBound], - where_clause: &ast::WhereClause) + ast_bounds: &[ast::TyParamBound]) -> ty::ParamBounds<'tcx> - where AC: AstConv<'tcx> { - let all_bounds = - merge_param_bounds(this.tcx(), param_ty, ast_bounds, where_clause); + where AC: AstConv<'tcx> +{ let astconv::PartitionedBounds { builtin_bounds, trait_bounds, region_bounds } = - astconv::partition_bounds(this.tcx(), span, all_bounds.as_slice()); + astconv::partition_bounds(this.tcx(), span, ast_bounds.as_slice()); let trait_bounds: Vec> = trait_bounds.into_iter() .map(|bound| { @@ -2062,43 +2105,6 @@ fn conv_param_bounds<'tcx,AC>(this: &AC, } } -/// Merges the bounds declared on a type parameter with those found from where clauses into a -/// single list. 
-fn merge_param_bounds<'a>(tcx: &ty::ctxt, - param_ty: ty::ParamTy, - ast_bounds: &'a [ast::TyParamBound], - where_clause: &'a ast::WhereClause) - -> Vec<&'a ast::TyParamBound> { - let mut result = Vec::new(); - - for ast_bound in ast_bounds.iter() { - result.push(ast_bound); - } - - for predicate in where_clause.predicates.iter() { - match predicate { - &ast::WherePredicate::BoundPredicate(ref bound_pred) => { - let predicate_param_id = - tcx.def_map - .borrow() - .get(&bound_pred.id) - .expect("merge_param_bounds(): resolve didn't resolve the \ - type parameter identifier in a `where` clause") - .def_id(); - if param_ty.def_id != predicate_param_id { - continue - } - for bound in bound_pred.bounds.iter() { - result.push(bound); - } - } - &ast::WherePredicate::EqPredicate(_) => panic!("not implemented") - } - } - - result -} - pub fn ty_of_foreign_fn_decl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, decl: &ast::FnDecl, def_id: ast::DefId, @@ -2136,6 +2142,7 @@ pub fn ty_of_foreign_fn_decl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let t_fn = ty::mk_bare_fn( ccx.tcx, + None, ty::BareFnTy { abi: abi, unsafety: ast::Unsafety::Unsafe, diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 5fc2466674ebe..65dff774528a7 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -71,7 +71,7 @@ This API is completely unstable and subject to change. html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(default_type_params, globs, import_shadowing, macro_rules, phase, quote)] +#![feature(default_type_params, globs, macro_rules, phase, quote)] #![feature(slicing_syntax, unsafe_destructor)] #![feature(rustc_diagnostic_macros)] #![feature(unboxed_closures)] @@ -90,7 +90,6 @@ pub use rustc::session; pub use rustc::util; use middle::def; -use middle::resolve; use middle::infer; use middle::subst; use middle::subst::VecPerParamSpace; @@ -103,6 +102,7 @@ use util::ppaux; use syntax::codemap::Span; use syntax::print::pprust::*; use syntax::{ast, ast_map, abi}; +use syntax::ast_util::local_def; #[cfg(stage0)] mod diagnostics; @@ -121,7 +121,7 @@ struct TypeAndSubsts<'tcx> { struct CrateCtxt<'a, 'tcx: 'a> { // A mapping from method call sites to traits that have that method. - trait_map: resolve::TraitMap, + trait_map: ty::TraitMap, tcx: &'a ty::ctxt<'tcx> } @@ -197,7 +197,7 @@ fn require_same_types<'a, 'tcx, M>(tcx: &ty::ctxt<'tcx>, format!("{}: {}", msg(), ty::type_err_to_str(tcx, - terr)).as_slice()); + terr))[]); ty::note_and_explain_type_err(tcx, terr); false } @@ -225,7 +225,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt, } _ => () } - let se_ty = ty::mk_bare_fn(tcx, ty::BareFnTy { + let se_ty = ty::mk_bare_fn(tcx, Some(local_def(main_id)), ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: abi::Rust, sig: ty::Binder(ty::FnSig { @@ -246,7 +246,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt, format!("main has a non-function type: found \ `{}`", ppaux::ty_to_string(tcx, - main_t)).as_slice()); + main_t))[]); } } } @@ -257,7 +257,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt, let tcx = ccx.tcx; let start_t = ty::node_id_to_type(tcx, start_id); match start_t.sty { - ty::ty_bare_fn(_) => { + ty::ty_bare_fn(..) 
=> { match tcx.map.find(start_id) { Some(ast_map::NodeItem(it)) => { match it.node { @@ -273,7 +273,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt, _ => () } - let se_ty = ty::mk_bare_fn(tcx, ty::BareFnTy { + let se_ty = ty::mk_bare_fn(tcx, Some(local_def(start_id)), ty::BareFnTy { unsafety: ast::Unsafety::Normal, abi: abi::Rust, sig: ty::Binder(ty::FnSig { @@ -297,8 +297,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt, tcx.sess.span_bug(start_span, format!("start has a non-function type: found \ `{}`", - ppaux::ty_to_string(tcx, - start_t)).as_slice()); + ppaux::ty_to_string(tcx, start_t))[]); } } } @@ -316,7 +315,7 @@ fn check_for_entry_fn(ccx: &CrateCtxt) { } } -pub fn check_crate(tcx: &ty::ctxt, trait_map: resolve::TraitMap) { +pub fn check_crate(tcx: &ty::ctxt, trait_map: ty::TraitMap) { let time_passes = tcx.sess.time_passes(); let ccx = CrateCtxt { trait_map: trait_map, diff --git a/src/librustc_typeck/variance.rs b/src/librustc_typeck/variance.rs index ef0d1bc3859fa..8e69bc42d9a0d 100644 --- a/src/librustc_typeck/variance.rs +++ b/src/librustc_typeck/variance.rs @@ -556,7 +556,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { None => { self.tcx().sess.bug(format!( "no inferred index entry for {}", - self.tcx().map.node_to_string(param_id)).as_slice()); + self.tcx().map.node_to_string(param_id))[]); } } } @@ -814,12 +814,13 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { } } - ty::ty_bare_fn(ty::BareFnTy { ref sig, .. }) | + ty::ty_bare_fn(_, ty::BareFnTy { ref sig, .. }) | ty::ty_closure(box ty::ClosureTy { ref sig, store: ty::UniqTraitStore, .. - }) => { + }) => + { self.add_constraints_from_sig(sig, variance); } @@ -834,7 +835,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.tcx().sess.bug( format!("unexpected type encountered in \ variance inference: {}", - ty.repr(self.tcx())).as_slice()); + ty.repr(self.tcx()))[]); } } } @@ -911,7 +912,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { .sess .bug(format!("unexpected region encountered in variance \ inference: {}", - region.repr(self.tcx())).as_slice()); + region.repr(self.tcx()))[]); } } } @@ -1046,7 +1047,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { // attribute and report an error with various results if found. 
if ty::has_attr(tcx, item_def_id, "rustc_variance") { let found = item_variances.repr(tcx); - tcx.sess.span_err(tcx.map.span(item_id), found.as_slice()); + tcx.sess.span_err(tcx.map.span(item_id), found[]); } let newly_added = tcx.item_variance_map.borrow_mut() diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index d0988af1cb473..2bc93ade7774e 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -176,7 +176,7 @@ pub fn build_external_trait(cx: &DocContext, tcx: &ty::ctxt, fn build_external_function(cx: &DocContext, tcx: &ty::ctxt, did: ast::DefId) -> clean::Function { let t = ty::lookup_item_type(tcx, did); let (decl, style) = match t.ty.sty { - ty::ty_bare_fn(ref f) => ((did, &f.sig).clean(cx), f.unsafety), + ty::ty_bare_fn(_, ref f) => ((did, &f.sig).clean(cx), f.unsafety), _ => panic!("bad function"), }; clean::Function { diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index ed92320279591..0dd6c2a7ce730 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -113,7 +113,7 @@ impl, U> Clean> for syntax::owned_slice::OwnedSlice { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Crate { pub name: String, pub src: FsPath, @@ -163,33 +163,24 @@ impl<'a, 'tcx> Clean for visit_ast::RustdocVisitor<'a, 'tcx> { }; let mut tmp = Vec::new(); for child in m.items.iter_mut() { - let inner = match child.inner { - ModuleItem(ref mut m) => m, + match child.inner { + ModuleItem(..) => {} _ => continue, - }; + } let prim = match PrimitiveType::find(child.attrs.as_slice()) { Some(prim) => prim, None => continue, }; primitives.push(prim); - let mut i = Item { + tmp.push(Item { source: Span::empty(), name: Some(prim.to_url_str().to_string()), - attrs: Vec::new(), - visibility: None, + attrs: child.attrs.clone(), + visibility: Some(ast::Public), stability: None, def_id: ast_util::local_def(prim.to_node_id()), inner: PrimitiveItem(prim), - }; - // Push one copy to get indexed for the whole crate, and push a - // another copy in the proper location which will actually get - // documented. The first copy will also serve as a redirect to - // the other copy. - tmp.push(i.clone()); - i.visibility = Some(ast::Public); - i.attrs = child.attrs.clone(); - inner.items.push(i); - + }); } m.items.extend(tmp.into_iter()); } @@ -204,7 +195,7 @@ impl<'a, 'tcx> Clean for visit_ast::RustdocVisitor<'a, 'tcx> { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct ExternalCrate { pub name: String, pub attrs: Vec, @@ -237,7 +228,7 @@ impl Clean for cstore::crate_metadata { /// Anything with a source location and set of attributes and, optionally, a /// name. That is, anything that can be documented. This doesn't correspond /// directly to the AST's concept of an item; it's a strict superset. 
-#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Item { /// Stringified span pub source: Span, @@ -313,7 +304,7 @@ impl Item { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub enum ItemEnum { StructItem(Struct), EnumItem(Enum), @@ -342,7 +333,7 @@ pub enum ItemEnum { AssociatedTypeItem(TyParam), } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Module { pub items: Vec, pub is_crate: bool, @@ -409,7 +400,7 @@ impl Clean for doctree::Module { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub enum Attribute { Word(String), List(String, Vec ), @@ -462,7 +453,7 @@ impl<'a> attr::AttrMetaMethods for &'a Attribute { fn meta_item_list(&self) -> Option<&[P]> { None } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct TyParam { pub name: String, pub did: ast::DefId, @@ -499,7 +490,7 @@ impl<'tcx> Clean for ty::TypeParameterDef<'tcx> { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub enum TyParamBound { RegionBound(Lifetime), TraitBound(Type) @@ -641,7 +632,7 @@ impl<'tcx> Clean>> for subst::Substs<'tcx> { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct Lifetime(String); impl Lifetime { @@ -691,9 +682,9 @@ impl Clean> for ty::Region { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct WherePredicate { - pub name: String, + pub ty: Type, pub bounds: Vec } @@ -702,11 +693,12 @@ impl Clean for ast::WherePredicate { match *self { ast::WherePredicate::BoundPredicate(ref wbp) => { WherePredicate { - name: wbp.ident.clean(cx), + ty: wbp.bounded_ty.clean(cx), bounds: wbp.bounds.clean(cx) } } - ast::WherePredicate::EqPredicate(_) => { + // FIXME(#20048) + _ => { unimplemented!(); } } @@ -714,7 +706,7 @@ impl Clean for ast::WherePredicate { } // maybe use a Generic enum and use ~[Generic]? 
-#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct Generics { pub lifetimes: Vec, pub type_params: Vec, @@ -742,7 +734,7 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics<'tcx>, subst::ParamSpace) { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Method { pub generics: Generics, pub self_: SelfTy, @@ -781,7 +773,7 @@ impl Clean for ast::Method { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct TyMethod { pub unsafety: ast::Unsafety, pub decl: FnDecl, @@ -819,7 +811,7 @@ impl Clean for ast::TypeMethod { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub enum SelfTy { SelfStatic, SelfValue, @@ -840,7 +832,7 @@ impl Clean for ast::ExplicitSelf_ { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Function { pub decl: FnDecl, pub generics: Generics, @@ -865,7 +857,7 @@ impl Clean for doctree::Function { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct ClosureDecl { pub lifetimes: Vec, pub decl: FnDecl, @@ -886,14 +878,14 @@ impl Clean for ast::ClosureTy { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct FnDecl { pub inputs: Arguments, pub output: FunctionRetTy, pub attrs: Vec, } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct Arguments { pub values: Vec, } @@ -946,7 +938,7 @@ impl<'a, 'tcx> Clean for (ast::DefId, &'a ty::PolyFnSig<'tcx>) { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct Argument { pub type_: Type, pub name: String, @@ -963,7 +955,7 @@ impl Clean for ast::Arg { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub enum FunctionRetTy { Return(Type), NoReturn @@ -978,7 +970,7 @@ impl Clean for ast::FunctionRetTy { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Trait { pub unsafety: ast::Unsafety, pub items: Vec, @@ -1022,7 +1014,7 @@ impl Clean for ast::PolyTraitRef { /// An item belonging to a trait, whether a method or associated. Could be named /// TraitItem except that's already taken by an exported enum variant. -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub enum TraitMethod { RequiredMethod(Item), ProvidedMethod(Item), @@ -1067,7 +1059,7 @@ impl Clean for ast::TraitItem { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub enum ImplMethod { MethodImplItem(Item), TypeImplItem(Item), @@ -1140,7 +1132,7 @@ impl<'tcx> Clean for ty::ImplOrTraitItem<'tcx> { /// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original /// type out of the AST/ty::ctxt given one of these, if more information is needed. Most importantly /// it does not preserve mutability or boxes. 
-#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub enum Type { /// structs/enums/traits (anything that'd be an ast::TyPath) ResolvedPath { @@ -1188,7 +1180,7 @@ pub enum Type { PolyTraitRef(Vec), } -#[deriving(Clone, Copy, Encodable, Decodable, PartialEq, Eq, Hash)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Copy)] pub enum PrimitiveType { Int, I8, I16, I32, I64, Uint, U8, U16, U32, U64, @@ -1200,7 +1192,7 @@ pub enum PrimitiveType { PrimitiveTuple, } -#[deriving(Clone, Copy, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable, Copy)] pub enum TypeKind { TypeEnum, TypeFunction, @@ -1368,7 +1360,7 @@ impl<'tcx> Clean for ty::Ty<'tcx> { mutability: mt.mutbl.clean(cx), type_: box mt.ty.clean(cx), }, - ty::ty_bare_fn(ref fty) => BareFunction(box BareFunctionDecl { + ty::ty_bare_fn(_, ref fty) => BareFunction(box BareFunctionDecl { unsafety: fty.unsafety, generics: Generics { lifetimes: Vec::new(), @@ -1444,7 +1436,7 @@ impl Clean for ast::QPath { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub enum StructField { HiddenStructField, // inserted later by strip passes TypedStructField(Type), @@ -1503,7 +1495,7 @@ impl Clean> for ast::Visibility { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Struct { pub struct_type: doctree::StructType, pub generics: Generics, @@ -1533,7 +1525,7 @@ impl Clean for doctree::Struct { /// This is a more limited form of the standard Struct, different in that /// it lacks the things most items have (name, id, parameterization). Found /// only as a variant in an enum. -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct VariantStruct { pub struct_type: doctree::StructType, pub fields: Vec, @@ -1550,7 +1542,7 @@ impl Clean for syntax::ast::StructDef { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Enum { pub variants: Vec, pub generics: Generics, @@ -1575,7 +1567,7 @@ impl Clean for doctree::Enum { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Variant { pub kind: VariantKind, } @@ -1643,7 +1635,7 @@ impl<'tcx> Clean for ty::VariantInfo<'tcx> { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub enum VariantKind { CLikeVariant, TupleVariant(Vec), @@ -1665,7 +1657,7 @@ impl Clean for ast::VariantKind { } } -#[deriving(Clone, Encodable, Decodable, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, Show)] pub struct Span { pub filename: String, pub loline: uint, @@ -1700,7 +1692,7 @@ impl Clean for syntax::codemap::Span { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct Path { pub global: bool, pub segments: Vec, @@ -1715,7 +1707,7 @@ impl Clean for ast::Path { } } -#[deriving(Clone, Encodable, Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct PathSegment { pub name: String, pub lifetimes: Vec, @@ -1771,7 +1763,7 @@ impl Clean for ast::Name { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Typedef { pub type_: Type, pub generics: Generics, @@ -1794,7 +1786,7 @@ impl Clean for doctree::Typedef { } } -#[deriving(Clone, Encodable, 
Decodable, PartialEq)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq)] pub struct BareFunctionDecl { pub unsafety: ast::Unsafety, pub generics: Generics, @@ -1817,7 +1809,7 @@ impl Clean for ast::BareFnTy { } } -#[deriving(Clone, Encodable, Decodable, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, Show)] pub struct Static { pub type_: Type, pub mutability: Mutability, @@ -1846,7 +1838,7 @@ impl Clean for doctree::Static { } } -#[deriving(Clone, Encodable, Decodable, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, Show)] pub struct Constant { pub type_: Type, pub expr: String, @@ -1869,7 +1861,7 @@ impl Clean for doctree::Constant { } } -#[deriving(Copy, Show, Clone, Encodable, Decodable, PartialEq)] +#[deriving(Show, Clone, RustcEncodable, RustcDecodable, PartialEq, Copy)] pub enum Mutability { Mutable, Immutable, @@ -1884,7 +1876,7 @@ impl Clean for ast::Mutability { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Impl { pub generics: Generics, pub trait_: Option, @@ -1922,7 +1914,7 @@ impl Clean for doctree::Impl { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct ViewItem { pub inner: ViewItemInner, } @@ -1988,7 +1980,7 @@ impl Clean> for ast::ViewItem { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub enum ViewItemInner { ExternCrate(String, Option, ast::NodeId), Import(ViewPath) @@ -2011,7 +2003,7 @@ impl Clean for ast::ViewItem_ { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub enum ViewPath { // use source as str; SimpleImport(String, ImportSource), @@ -2021,7 +2013,7 @@ pub enum ViewPath { ImportList(ImportSource, Vec), } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct ImportSource { pub path: Path, pub did: Option, @@ -2042,7 +2034,7 @@ impl Clean for ast::ViewPath { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct ViewListIdent { pub name: String, pub source: Option, @@ -2255,7 +2247,7 @@ fn resolve_def(cx: &DocContext, id: ast::NodeId) -> Option { }) } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Macro { pub source: String, } @@ -2276,7 +2268,7 @@ impl Clean for doctree::Macro { } } -#[deriving(Clone, Encodable, Decodable)] +#[deriving(Clone, RustcEncodable, RustcDecodable)] pub struct Stability { pub level: attr::StabilityLevel, pub text: String diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 83552884d7ffa..7f7c055062aaa 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -70,7 +70,7 @@ impl Module { } } -#[deriving(Copy, Show, Clone, Encodable, Decodable)] +#[deriving(Show, Clone, RustcEncodable, RustcDecodable, Copy)] pub enum StructType { /// A normal struct Plain, diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs index 08fb94a801c3e..25a20e5998bd8 100644 --- a/src/librustdoc/externalfiles.rs +++ b/src/librustdoc/externalfiles.rs @@ -36,7 +36,7 @@ impl ExternalHtml { pub fn load_string(input: &Path) -> io::IoResult> { let mut f = try!(io::File::open(input)); let d = try!(f.read_to_end()); - Ok(str::from_utf8(d.as_slice()).map(|s| s.to_string())) + Ok(str::from_utf8(d.as_slice()).map(|s| s.to_string()).ok()) } macro_rules! 
load_or_return { diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 5572bcb6aa8f0..a2d5530692c11 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -16,7 +16,7 @@ //! them in the future to instead emit any format desired. use std::fmt; -use std::string::String; +use std::iter::repeat; use syntax::ast; use syntax::ast_util; @@ -129,7 +129,7 @@ impl<'a> fmt::Show for WhereClause<'a> { try!(f.write(", ".as_bytes())); } let bounds = pred.bounds.as_slice(); - try!(write!(f, "{}: {}", pred.name, TyParamBounds(bounds))); + try!(write!(f, "{}: {}", pred.ty, TyParamBounds(bounds))); } Ok(()) } @@ -198,12 +198,12 @@ fn resolved_path(w: &mut fmt::Formatter, did: ast::DefId, p: &clean::Path, path(w, p, print_all, |cache, loc| { if ast_util::is_local(did) || cache.inlined.contains(&did) { - Some(("../".repeat(loc.len())).to_string()) + Some(repeat("../").take(loc.len()).collect::()) } else { match cache.extern_locations[did.krate] { render::Remote(ref s) => Some(s.to_string()), render::Local => { - Some(("../".repeat(loc.len())).to_string()) + Some(repeat("../").take(loc.len()).collect::()) } render::Unknown => None, } @@ -324,7 +324,7 @@ fn primitive_link(f: &mut fmt::Formatter, let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len()); let len = if len == 0 {0} else {len - 1}; try!(write!(f, "", - "../".repeat(len), + repeat("../").take(len).collect::(), prim.to_url_str())); needs_termination = true; } @@ -337,7 +337,7 @@ fn primitive_link(f: &mut fmt::Formatter, render::Remote(ref s) => Some(s.to_string()), render::Local => { let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len()); - Some("../".repeat(len)) + Some(repeat("../").take(len).collect::()) } render::Unknown => None, }; diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 111650f565cf6..c936f6a0819d2 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -34,7 +34,7 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String { class, id, &mut out).unwrap(); - String::from_utf8_lossy(out[]).into_string() + String::from_utf8_lossy(out[]).into_owned() } /// Exhausts the `lexer` writing the output into `out`. 
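The format.rs hunks above replace the old `"../".repeat(loc.len())` calls with `std::iter::repeat("../").take(n).collect::<String>()`, apparently because the string `repeat` method was on its way out of libstd at the time. A small self-contained illustration of the replacement pattern (written against current Rust just to show the shape of the call):

    use std::iter::repeat;

    fn main() {
        // Build the "../../../" prefix used to link back up `depth` directories.
        let depth = 3;
        let up: String = repeat("../").take(depth).collect();
        assert_eq!(up, "../../../");
    }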
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 8b2f644dfe33b..f7984b8973cc6 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -65,17 +65,21 @@ const HOEDOWN_EXTENSIONS: libc::c_uint = type hoedown_document = libc::c_void; // this is opaque to us +type blockcodefn = extern "C" fn(*mut hoedown_buffer, *const hoedown_buffer, + *const hoedown_buffer, *mut libc::c_void); + +type headerfn = extern "C" fn(*mut hoedown_buffer, *const hoedown_buffer, + libc::c_int, *mut libc::c_void); + #[repr(C)] struct hoedown_renderer { opaque: *mut hoedown_html_renderer_state, - blockcode: Option, + blockcode: Option, blockquote: Option, blockhtml: Option, - header: Option, + header: Option, other: [libc::size_t, ..28], } @@ -174,7 +178,7 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result { let rlang = slice::from_raw_buf(&(*lang).data, (*lang).size as uint); let rlang = str::from_utf8(rlang).unwrap(); - if LangString::parse(rlang).notrust { + if !LangString::parse(rlang).rust { (my_opaque.dfltblk)(ob, orig_text, lang, opaque as *mut libc::c_void); true @@ -281,8 +285,8 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result { toc_builder: if print_toc {Some(TocBuilder::new())} else {None} }; (*(*renderer).opaque).opaque = &mut opaque as *mut _ as *mut libc::c_void; - (*renderer).blockcode = Some(block); - (*renderer).header = Some(header); + (*renderer).blockcode = Some(block as blockcodefn); + (*renderer).header = Some(header as headerfn); let document = hoedown_document_new(renderer, HOEDOWN_EXTENSIONS, 16); hoedown_document_render(document, ob, s.as_ptr(), @@ -320,7 +324,7 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) { let s = str::from_utf8(lang).unwrap(); LangString::parse(s) }; - if block_info.notrust { return } + if !block_info.rust { return } let text = slice::from_raw_buf(&(*text).data, (*text).size as uint); let opaque = opaque as *mut hoedown_html_renderer_state; let tests = &mut *((*opaque).opaque as *mut ::test::Collector); @@ -354,8 +358,8 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) { unsafe { let ob = hoedown_buffer_new(DEF_OUNIT); let renderer = hoedown_html_renderer_new(0, 0); - (*renderer).blockcode = Some(block); - (*renderer).header = Some(header); + (*renderer).blockcode = Some(block as blockcodefn); + (*renderer).header = Some(header as headerfn); (*(*renderer).opaque).opaque = tests as *mut _ as *mut libc::c_void; let document = hoedown_document_new(renderer, HOEDOWN_EXTENSIONS, 16); @@ -373,7 +377,7 @@ struct LangString { should_fail: bool, no_run: bool, ignore: bool, - notrust: bool, + rust: bool, test_harness: bool, } @@ -383,7 +387,7 @@ impl LangString { should_fail: false, no_run: false, ignore: false, - notrust: false, + rust: false, test_harness: false, } } @@ -403,14 +407,13 @@ impl LangString { "should_fail" => { data.should_fail = true; seen_rust_tags = true; }, "no_run" => { data.no_run = true; seen_rust_tags = true; }, "ignore" => { data.ignore = true; seen_rust_tags = true; }, - "notrust" => { data.notrust = true; seen_rust_tags = true; }, - "rust" => { data.notrust = false; seen_rust_tags = true; }, + "rust" => { data.rust = true; seen_rust_tags = true; }, "test_harness" => { data.test_harness = true; seen_rust_tags = true; } _ => { seen_other_tags = true } } } - data.notrust |= seen_other_tags && !seen_rust_tags; + data.rust |= !seen_other_tags || seen_rust_tags; data } @@ -452,28 +455,27 @@ 
mod tests { #[test] fn test_lang_string_parse() { fn t(s: &str, - should_fail: bool, no_run: bool, ignore: bool, notrust: bool, test_harness: bool) { + should_fail: bool, no_run: bool, ignore: bool, rust: bool, test_harness: bool) { assert_eq!(LangString::parse(s), LangString { should_fail: should_fail, no_run: no_run, ignore: ignore, - notrust: notrust, + rust: rust, test_harness: test_harness, }) } - t("", false,false,false,false,false); - t("rust", false,false,false,false,false); - t("sh", false,false,false,true,false); - t("notrust", false,false,false,true,false); - t("ignore", false,false,true,false,false); - t("should_fail", true,false,false,false,false); - t("no_run", false,true,false,false,false); - t("test_harness", false,false,false,false,true); - t("{.no_run .example}", false,true,false,false,false); - t("{.sh .should_fail}", true,false,false,false,false); - t("{.example .rust}", false,false,false,false,false); - t("{.test_harness .rust}", false,false,false,false,true); + t("", false,false,false,true,false); + t("rust", false,false,false,true,false); + t("sh", false,false,false,false,false); + t("ignore", false,false,true,true,false); + t("should_fail", true,false,false,true,false); + t("no_run", false,true,false,true,false); + t("test_harness", false,false,false,true,true); + t("{.no_run .example}", false,true,false,true,false); + t("{.sh .should_fail}", true,false,false,true,false); + t("{.example .rust}", false,false,false,true,false); + t("{.test_harness .rust}", false,false,false,true,true); } #[test] diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 8831b5e7d96fe..dc31cfae99cb4 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -35,15 +35,15 @@ pub use self::ExternalLocation::*; use std::cell::RefCell; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::{HashMap, HashSet}; use std::default::Default; use std::fmt; use std::io::fs::PathExtensions; use std::io::{fs, File, BufferedWriter, BufferedReader}; use std::io; +use std::iter::repeat; use std::str; -use std::string::String; use std::sync::Arc; use externalfiles::ExternalHtml; @@ -1186,7 +1186,8 @@ impl Context { &Sidebar{ cx: cx, item: it }, &Item{ cx: cx, item: it })); } else { - let mut url = "../".repeat(cx.current.len()); + let mut url = repeat("../").take(cx.current.len()) + .collect::(); match cache().paths.get(&it.def_id) { Some(&(ref names, _)) => { for name in names[..names.len() - 1].iter() { @@ -1382,7 +1383,8 @@ impl<'a> fmt::Show for Item<'a> { let amt = if self.ismodule() { cur.len() - 1 } else { cur.len() }; for (i, component) in cur.iter().enumerate().take(amt) { try!(write!(fmt, "{}::", - "../".repeat(cur.len() - i - 1), + repeat("../").take(cur.len() - i - 1) + .collect::(), component.as_slice())); } } diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 9b99956937c24..8dfb352d0288f 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -32,9 +32,11 @@ extern crate syntax; extern crate "test" as testing; #[phase(plugin, link)] extern crate log; +extern crate "serialize" as rustc_serialize; // used by deriving + use std::cell::RefCell; use std::collections::HashMap; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::io::File; use std::io; use std::rc::Rc; @@ -173,13 +175,8 @@ pub fn main_args(args: &[String]) -> int { usage(args[0].as_slice()); return 0; } else if 
matches.opt_present("version") { - match rustc_driver::version("rustdoc", &matches) { - Some(err) => { - println!("{}", err); - return 1 - }, - None => return 0 - } + rustc_driver::version("rustdoc", &matches); + return 0; } if matches.opt_strs("passes") == ["list"] { diff --git a/src/librustdoc/passes.rs b/src/librustdoc/passes.rs index e368d7f93320c..9a67b479106ee 100644 --- a/src/librustdoc/passes.rs +++ b/src/librustdoc/passes.rs @@ -319,7 +319,7 @@ pub fn unindent(s: &str) -> String { let ignore_previous_indents = saw_first_line && !saw_second_line && - !line.is_whitespace(); + !line.chars().all(|c| c.is_whitespace()); let min_indent = if ignore_previous_indents { uint::MAX @@ -331,7 +331,7 @@ pub fn unindent(s: &str) -> String { saw_second_line = true; } - if line.is_whitespace() { + if line.chars().all(|c| c.is_whitespace()) { min_indent } else { saw_first_line = true; @@ -353,7 +353,7 @@ pub fn unindent(s: &str) -> String { if lines.len() >= 1 { let mut unindented = vec![ lines[0].trim().to_string() ]; unindented.push_all(lines.tail().iter().map(|&line| { - if line.is_whitespace() { + if line.chars().all(|c| c.is_whitespace()) { line.to_string() } else { assert!(line.len() >= min_indent); diff --git a/src/librustdoc/stability_summary.rs b/src/librustdoc/stability_summary.rs index 2f3079f75b923..2e3adf8e76787 100644 --- a/src/librustdoc/stability_summary.rs +++ b/src/librustdoc/stability_summary.rs @@ -25,7 +25,7 @@ use clean::{TypeTraitItem, ViewItemItem, PrimitiveItem, Stability}; use html::render::cache; -#[deriving(Zero, Encodable, Decodable, PartialEq, Eq)] +#[deriving(Zero, RustcEncodable, RustcDecodable, PartialEq, Eq)] /// The counts for each stability level. #[deriving(Copy)] pub struct Counts { @@ -73,7 +73,7 @@ impl Counts { } } -#[deriving(Encodable, Decodable, PartialEq, Eq)] +#[deriving(RustcEncodable, RustcDecodable, PartialEq, Eq)] /// A summarized module, which includes total counts and summarized children /// modules. pub struct ModuleSummary { diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index d3d1aa1d78879..7df5590fb40e2 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -57,17 +57,17 @@ //! //! Rust provides a mechanism for low boilerplate encoding & decoding of values to and from JSON via //! the serialization API. -//! To be able to encode a piece of data, it must implement the `serialize::Encodable` trait. -//! To be able to decode a piece of data, it must implement the `serialize::Decodable` trait. +//! To be able to encode a piece of data, it must implement the `serialize::RustcEncodable` trait. +//! To be able to decode a piece of data, it must implement the `serialize::RustcDecodable` trait. //! The Rust compiler provides an annotation to automatically generate the code for these traits: -//! `#[deriving(Decodable, Encodable)]` +//! `#[deriving(RustcDecodable, RustcEncodable)]` //! //! The JSON API provides an enum `json::Json` and a trait `ToJson` to encode objects. //! The `ToJson` trait provides a `to_json` method to convert an object into a `json::Json` value. //! A `json::Json` value can be encoded as a string or buffer using the functions described above. //! You can also use the `json::Encoder` object, which implements the `Encoder` trait. //! -//! When using `ToJson` the `Encodable` trait implementation is not mandatory. +//! When using `ToJson` the `RustcEncodable` trait implementation is not mandatory. //! //! # Examples of use //! @@ -127,7 +127,7 @@ //! } //! } //! -//! 
// Only generate `Encodable` trait implementation +//! // Only generate `RustcEncodable` trait implementation //! #[deriving(Encodable)] //! pub struct ComplexNumRecord { //! uid: u8, @@ -202,10 +202,11 @@ use std::collections::{HashMap, BTreeMap}; use std::{char, f64, fmt, io, num, str}; use std::mem::{swap, transmute}; use std::num::{Float, FPNaN, FPInfinite, Int}; -use std::str::{FromStr, ScalarValue}; +use std::str::{FromStr}; use std::string; -use std::vec::Vec; use std::ops; +use unicode::str as unicode_str; +use unicode::str::Utf16Item; use Encodable; @@ -333,11 +334,39 @@ pub fn escape_bytes(wr: &mut io::Writer, bytes: &[u8]) -> Result<(), io::IoError let escaped = match *byte { b'"' => "\\\"", b'\\' => "\\\\", + b'\x00' => "\\u0000", + b'\x01' => "\\u0001", + b'\x02' => "\\u0002", + b'\x03' => "\\u0003", + b'\x04' => "\\u0004", + b'\x05' => "\\u0005", + b'\x06' => "\\u0006", + b'\x07' => "\\u0007", b'\x08' => "\\b", - b'\x0c' => "\\f", + b'\t' => "\\t", b'\n' => "\\n", + b'\x0b' => "\\u000b", + b'\x0c' => "\\f", b'\r' => "\\r", - b'\t' => "\\t", + b'\x0e' => "\\u000e", + b'\x0f' => "\\u000f", + b'\x10' => "\\u0010", + b'\x11' => "\\u0011", + b'\x12' => "\\u0012", + b'\x13' => "\\u0013", + b'\x14' => "\\u0014", + b'\x15' => "\\u0015", + b'\x16' => "\\u0016", + b'\x17' => "\\u0017", + b'\x18' => "\\u0018", + b'\x19' => "\\u0019", + b'\x1a' => "\\u001a", + b'\x1b' => "\\u001b", + b'\x1c' => "\\u001c", + b'\x1d' => "\\u001d", + b'\x1e' => "\\u001e", + b'\x1f' => "\\u001f", + b'\x7f' => "\\u007f", _ => { continue; } }; @@ -404,7 +433,7 @@ impl<'a> Encoder<'a> { } /// Encode the specified struct into a json [u8] - pub fn buffer_encode, io::IoError>>(object: &T) -> Vec { + pub fn buffer_encode, io::IoError>>(object: &T) -> Vec { //Serialize the object in a string using a writer let mut m = Vec::new(); // FIXME(14302) remove the transmute and unsafe block. @@ -1001,7 +1030,7 @@ impl Json { /// Returns None otherwise. pub fn as_string<'a>(&'a self) -> Option<&'a str> { match *self { - Json::String(ref s) => Some(s.as_slice()), + Json::String(ref s) => Some(s[]), _ => None } } @@ -1585,8 +1614,8 @@ impl> Parser { } let buf = [n1, try!(self.decode_hex_escape())]; - match str::utf16_items(buf.as_slice()).next() { - Some(ScalarValue(c)) => res.push(c), + match unicode_str::utf16_items(&buf).next() { + Some(Utf16Item::ScalarValue(c)) => res.push(c), _ => return self.error(LoneLeadingSurrogateInHexEscape), } } @@ -1934,7 +1963,7 @@ pub fn from_reader(rdr: &mut io::Reader) -> Result { Ok(c) => c, Err(e) => return Err(io_error_to_error(e)) }; - let s = match str::from_utf8(contents.as_slice()) { + let s = match str::from_utf8(contents.as_slice()).ok() { Some(s) => s, _ => return Err(SyntaxError(NotUtf8, 0, 0)) }; @@ -1970,7 +1999,7 @@ macro_rules! expect { ($e:expr, Null) => ({ match $e { Json::Null => Ok(()), - other => Err(ExpectedError("Null".into_string(), + other => Err(ExpectedError("Null".to_string(), format!("{}", other))) } }); @@ -1991,20 +2020,20 @@ macro_rules! 
read_primitive { match self.pop() { Json::I64(f) => match num::cast(f) { Some(f) => Ok(f), - None => Err(ExpectedError("Number".into_string(), format!("{}", f))), + None => Err(ExpectedError("Number".to_string(), format!("{}", f))), }, Json::U64(f) => match num::cast(f) { Some(f) => Ok(f), - None => Err(ExpectedError("Number".into_string(), format!("{}", f))), + None => Err(ExpectedError("Number".to_string(), format!("{}", f))), }, - Json::F64(f) => Err(ExpectedError("Integer".into_string(), format!("{}", f))), + Json::F64(f) => Err(ExpectedError("Integer".to_string(), format!("{}", f))), // re: #12967.. a type w/ numeric keys (ie HashMap etc) // is going to have a string here, as per JSON spec. Json::String(s) => match std::str::from_str(s.as_slice()) { Some(f) => Ok(f), - None => Err(ExpectedError("Number".into_string(), s)), + None => Err(ExpectedError("Number".to_string(), s)), }, - value => Err(ExpectedError("Number".into_string(), format!("{}", value))), + value => Err(ExpectedError("Number".to_string(), format!("{}", value))), } } } @@ -2012,7 +2041,6 @@ macro_rules! read_primitive { impl ::Decoder for Decoder { fn read_nil(&mut self) -> DecodeResult<()> { - debug!("read_nil"); expect!(self.pop(), Null) } @@ -2030,7 +2058,6 @@ impl ::Decoder for Decoder { fn read_f32(&mut self) -> DecodeResult { self.read_f64().map(|x| x as f32) } fn read_f64(&mut self) -> DecodeResult { - debug!("read_f64"); match self.pop() { Json::I64(f) => Ok(f as f64), Json::U64(f) => Ok(f as f64), @@ -2038,18 +2065,17 @@ impl ::Decoder for Decoder { Json::String(s) => { // re: #12967.. a type w/ numeric keys (ie HashMap etc) // is going to have a string here, as per JSON spec. - match std::str::from_str(s.as_slice()) { + match s.parse() { Some(f) => Ok(f), - None => Err(ExpectedError("Number".into_string(), s)), + None => Err(ExpectedError("Number".to_string(), s)), } }, Json::Null => Ok(f64::NAN), - value => Err(ExpectedError("Number".into_string(), format!("{}", value))) + value => Err(ExpectedError("Number".to_string(), format!("{}", value))) } } fn read_bool(&mut self) -> DecodeResult { - debug!("read_bool"); expect!(self.pop(), Boolean) } @@ -2063,18 +2089,16 @@ impl ::Decoder for Decoder { _ => () } } - Err(ExpectedError("single character string".into_string(), format!("{}", s))) + Err(ExpectedError("single character string".to_string(), format!("{}", s))) } fn read_str(&mut self) -> DecodeResult { - debug!("read_str"); expect!(self.pop(), String) } - fn read_enum(&mut self, name: &str, f: F) -> DecodeResult where + fn read_enum(&mut self, _name: &str, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_enum({})", name); f(self) } @@ -2082,76 +2106,70 @@ impl ::Decoder for Decoder { mut f: F) -> DecodeResult where F: FnMut(&mut Decoder, uint) -> DecodeResult, { - debug!("read_enum_variant(names={})", names); let name = match self.pop() { Json::String(s) => s, Json::Object(mut o) => { - let n = match o.remove(&"variant".into_string()) { + let n = match o.remove(&"variant".to_string()) { Some(Json::String(s)) => s, Some(val) => { - return Err(ExpectedError("String".into_string(), format!("{}", val))) + return Err(ExpectedError("String".to_string(), format!("{}", val))) } None => { - return Err(MissingFieldError("variant".into_string())) + return Err(MissingFieldError("variant".to_string())) } }; - match o.remove(&"fields".into_string()) { + match o.remove(&"fields".to_string()) { Some(Json::Array(l)) => { for field in l.into_iter().rev() { self.stack.push(field); } }, 
Some(val) => { - return Err(ExpectedError("Array".into_string(), format!("{}", val))) + return Err(ExpectedError("Array".to_string(), format!("{}", val))) } None => { - return Err(MissingFieldError("fields".into_string())) + return Err(MissingFieldError("fields".to_string())) } } n } json => { - return Err(ExpectedError("String or Object".into_string(), format!("{}", json))) + return Err(ExpectedError("String or Object".to_string(), format!("{}", json))) } }; - let idx = match names.iter() - .position(|n| str::eq_slice(*n, name.as_slice())) { + let idx = match names.iter().position(|n| *n == name[]) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; f(self, idx) } - fn read_enum_variant_arg(&mut self, idx: uint, f: F) -> DecodeResult where + fn read_enum_variant_arg(&mut self, _idx: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_enum_variant_arg(idx={})", idx); f(self) } fn read_enum_struct_variant(&mut self, names: &[&str], f: F) -> DecodeResult where F: FnMut(&mut Decoder, uint) -> DecodeResult, { - debug!("read_enum_struct_variant(names={})", names); self.read_enum_variant(names, f) } fn read_enum_struct_variant_field(&mut self, - name: &str, + _name: &str, idx: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_enum_struct_variant_field(name={}, idx={})", name, idx); self.read_enum_variant_arg(idx, f) } - fn read_struct(&mut self, name: &str, len: uint, f: F) -> DecodeResult where + fn read_struct(&mut self, _name: &str, _len: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_struct(name={}, len={})", name, len); let value = try!(f(self)); self.pop(); Ok(value) @@ -2159,12 +2177,11 @@ impl ::Decoder for Decoder { fn read_struct_field(&mut self, name: &str, - idx: uint, + _idx: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_struct_field(name={}, idx={})", name, idx); let mut obj = try!(expect!(self.pop(), Object)); let value = match obj.remove(&name.to_string()) { @@ -2189,7 +2206,6 @@ impl ::Decoder for Decoder { fn read_tuple(&mut self, tuple_len: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_tuple()"); self.read_seq(move |d, len| { if len == tuple_len { f(d) @@ -2202,18 +2218,16 @@ impl ::Decoder for Decoder { fn read_tuple_arg(&mut self, idx: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_tuple_arg(idx={})", idx); self.read_seq_elt(idx, f) } fn read_tuple_struct(&mut self, - name: &str, + _name: &str, len: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_tuple_struct(name={})", name); self.read_tuple(len, f) } @@ -2223,14 +2237,12 @@ impl ::Decoder for Decoder { -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_tuple_struct_arg(idx={})", idx); self.read_tuple_arg(idx, f) } fn read_option(&mut self, mut f: F) -> DecodeResult where F: FnMut(&mut Decoder, bool) -> DecodeResult, { - debug!("read_option()"); match self.pop() { Json::Null => f(self, false), value => { self.stack.push(value); f(self, true) } @@ -2240,7 +2252,6 @@ impl ::Decoder for Decoder { fn read_seq(&mut self, f: F) -> DecodeResult where F: FnOnce(&mut Decoder, uint) -> DecodeResult, { - debug!("read_seq()"); let array = try!(expect!(self.pop(), Array)); let len = array.len(); for v in array.into_iter().rev() { @@ -2249,17 +2260,15 @@ impl ::Decoder for Decoder { f(self, 
len) } - fn read_seq_elt(&mut self, idx: uint, f: F) -> DecodeResult where + fn read_seq_elt(&mut self, _idx: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_seq_elt(idx={})", idx); f(self) } fn read_map(&mut self, f: F) -> DecodeResult where F: FnOnce(&mut Decoder, uint) -> DecodeResult, { - debug!("read_map()"); let obj = try!(expect!(self.pop(), Object)); let len = obj.len(); for (key, value) in obj.into_iter() { @@ -2269,17 +2278,15 @@ impl ::Decoder for Decoder { f(self, len) } - fn read_map_elt_key(&mut self, idx: uint, f: F) -> DecodeResult where + fn read_map_elt_key(&mut self, _idx: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_map_elt_key(idx={})", idx); f(self) } - fn read_map_elt_val(&mut self, idx: uint, f: F) -> DecodeResult where + fn read_map_elt_val(&mut self, _idx: uint, f: F) -> DecodeResult where F: FnOnce(&mut Decoder) -> DecodeResult, { - debug!("read_map_elt_val(idx={})", idx); f(self) } @@ -2340,7 +2347,7 @@ impl ToJson for bool { } impl ToJson for str { - fn to_json(&self) -> Json { Json::String(self.into_string()) } + fn to_json(&self) -> Json { Json::String(self.to_string()) } } impl ToJson for string::String { @@ -2441,9 +2448,7 @@ mod tests { use super::ParserError::*; use super::DecoderError::*; use super::JsonEvent::*; - use super::ParserState::*; use super::StackElement::*; - use super::InternalStackElement::*; use super::{PrettyEncoder, Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, StackElement, Stack, Encoder, Decoder}; use std::{i64, u64, f32, f64, io}; @@ -2451,7 +2456,7 @@ mod tests { use std::num::Float; use std::string; - #[deriving(Decodable, Eq, PartialEq, Show)] + #[deriving(RustcDecodable, Eq, PartialEq, Show)] struct OptionData { opt: Option, } @@ -2473,25 +2478,25 @@ mod tests { #[test] fn test_decode_option_malformed() { check_err::("{ \"opt\": [] }", - ExpectedError("Number".into_string(), "[]".into_string())); + ExpectedError("Number".to_string(), "[]".to_string())); check_err::("{ \"opt\": false }", - ExpectedError("Number".into_string(), "false".into_string())); + ExpectedError("Number".to_string(), "false".to_string())); } - #[deriving(PartialEq, Encodable, Decodable, Show)] + #[deriving(PartialEq, RustcEncodable, RustcDecodable, Show)] enum Animal { Dog, Frog(string::String, int) } - #[deriving(PartialEq, Encodable, Decodable, Show)] + #[deriving(PartialEq, RustcEncodable, RustcDecodable, Show)] struct Inner { a: (), b: uint, c: Vec, } - #[deriving(PartialEq, Encodable, Decodable, Show)] + #[deriving(PartialEq, RustcEncodable, RustcDecodable, Show)] struct Outer { inner: Vec, } @@ -2561,11 +2566,11 @@ mod tests { #[test] fn test_write_str() { - assert_eq!(String("".into_string()).to_string(), "\"\""); - assert_eq!(String("".into_string()).to_pretty_str(), "\"\""); + assert_eq!(String("".to_string()).to_string(), "\"\""); + assert_eq!(String("".to_string()).to_pretty_str(), "\"\""); - assert_eq!(String("homura".into_string()).to_string(), "\"homura\""); - assert_eq!(String("madoka".into_string()).to_pretty_str(), "\"madoka\""); + assert_eq!(String("homura".to_string()).to_string(), "\"homura\""); + assert_eq!(String("madoka".to_string()).to_pretty_str(), "\"madoka\""); } #[test] @@ -2594,7 +2599,7 @@ mod tests { let long_test_array = Array(vec![ Boolean(false), Null, - Array(vec![String("foo\nbar".into_string()), F64(3.5)])]); + Array(vec![String("foo\nbar".to_string()), F64(3.5)])]); assert_eq!(long_test_array.to_string(), 
"[false,null,[\"foo\\nbar\",3.5]]"); @@ -2619,12 +2624,12 @@ mod tests { assert_eq!( mk_object(&[ - ("a".into_string(), Boolean(true)) + ("a".to_string(), Boolean(true)) ]).to_string(), "{\"a\":true}" ); assert_eq!( - mk_object(&[("a".into_string(), Boolean(true))]).to_pretty_str(), + mk_object(&[("a".to_string(), Boolean(true))]).to_pretty_str(), "\ {\n \ \"a\": true\n\ @@ -2632,9 +2637,9 @@ mod tests { ); let complex_obj = mk_object(&[ - ("b".into_string(), Array(vec![ - mk_object(&[("c".into_string(), String("\x0c\r".into_string()))]), - mk_object(&[("d".into_string(), String("".into_string()))]) + ("b".to_string(), Array(vec![ + mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), + mk_object(&[("d".to_string(), String("".to_string()))]) ])) ]); @@ -2663,10 +2668,10 @@ mod tests { ); let a = mk_object(&[ - ("a".into_string(), Boolean(true)), - ("b".into_string(), Array(vec![ - mk_object(&[("c".into_string(), String("\x0c\r".into_string()))]), - mk_object(&[("d".into_string(), String("".into_string()))]) + ("a".to_string(), Boolean(true)), + ("b".to_string(), Array(vec![ + mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), + mk_object(&[("d".to_string(), String("".to_string()))]) ])) ]); @@ -2678,8 +2683,6 @@ mod tests { } fn with_str_writer(f: F) -> string::String where F: FnOnce(&mut io::Writer){ - use std::str; - let mut m = Vec::new(); f(&mut m as &mut io::Writer); string::String::from_utf8(m).unwrap() @@ -2703,7 +2706,7 @@ mod tests { "\"Dog\"" ); - let animal = Frog("Henry".into_string(), 349); + let animal = Frog("Henry".to_string(), 349); assert_eq!( with_str_writer(|writer| { let mut encoder = Encoder::new(writer); @@ -2756,9 +2759,12 @@ mod tests { fn test_write_char() { check_encoder_for_simple!('a', "\"a\""); check_encoder_for_simple!('\t', "\"\\t\""); - check_encoder_for_simple!('\u00a0', "\"\u00a0\""); - check_encoder_for_simple!('\uabcd', "\"\uabcd\""); - check_encoder_for_simple!('\U0010ffff', "\"\U0010ffff\""); + check_encoder_for_simple!('\u{0000}', "\"\\u0000\""); + check_encoder_for_simple!('\u{001b}', "\"\\u001b\""); + check_encoder_for_simple!('\u{007f}', "\"\\u007f\""); + check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\""); + check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\""); + check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\""); } #[test] @@ -2864,7 +2870,7 @@ mod tests { assert_eq!(v, i64::MAX); let res: DecodeResult = super::decode("765.25252"); - assert_eq!(res, Err(ExpectedError("Integer".into_string(), "765.25252".into_string()))); + assert_eq!(res, Err(ExpectedError("Integer".to_string(), "765.25252".to_string()))); } #[test] @@ -2872,16 +2878,16 @@ mod tests { assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2))); assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5))); - assert_eq!(from_str("\"\""), Ok(String("".into_string()))); - assert_eq!(from_str("\"foo\""), Ok(String("foo".into_string()))); - assert_eq!(from_str("\"\\\"\""), Ok(String("\"".into_string()))); - assert_eq!(from_str("\"\\b\""), Ok(String("\x08".into_string()))); - assert_eq!(from_str("\"\\n\""), Ok(String("\n".into_string()))); - assert_eq!(from_str("\"\\r\""), Ok(String("\r".into_string()))); - assert_eq!(from_str("\"\\t\""), Ok(String("\t".into_string()))); - assert_eq!(from_str(" \"foo\" "), Ok(String("foo".into_string()))); - assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".into_string()))); - assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".into_string()))); + assert_eq!(from_str("\"\""), 
Ok(String("".to_string()))); + assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string()))); + assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string()))); + assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string()))); + assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string()))); + assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string()))); + assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string()))); + assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string()))); + assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string()))); + assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string()))); } #[test] @@ -2947,7 +2953,7 @@ mod tests { assert_eq!(t, (1u, 2, 3)); let t: (uint, string::String) = super::decode("[1, \"two\"]").unwrap(); - assert_eq!(t, (1u, "two".into_string())); + assert_eq!(t, (1u, "two".to_string())); } #[test] @@ -2977,22 +2983,22 @@ mod tests { assert_eq!(from_str("{}").unwrap(), mk_object(&[])); assert_eq!(from_str("{\"a\": 3}").unwrap(), - mk_object(&[("a".into_string(), U64(3))])); + mk_object(&[("a".to_string(), U64(3))])); assert_eq!(from_str( "{ \"a\": null, \"b\" : true }").unwrap(), mk_object(&[ - ("a".into_string(), Null), - ("b".into_string(), Boolean(true))])); + ("a".to_string(), Null), + ("b".to_string(), Boolean(true))])); assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(), mk_object(&[ - ("a".into_string(), Null), - ("b".into_string(), Boolean(true))])); + ("a".to_string(), Null), + ("b".to_string(), Boolean(true))])); assert_eq!(from_str( "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(), mk_object(&[ - ("a".into_string(), F64(1.0)), - ("b".into_string(), Array(vec![Boolean(true)])) + ("a".to_string(), F64(1.0)), + ("b".to_string(), Array(vec![Boolean(true)])) ])); assert_eq!(from_str( "{\ @@ -3004,12 +3010,12 @@ mod tests { ]\ }").unwrap(), mk_object(&[ - ("a".into_string(), F64(1.0)), - ("b".into_string(), Array(vec![ + ("a".to_string(), F64(1.0)), + ("b".to_string(), Array(vec![ Boolean(true), - String("foo\nbar".into_string()), + String("foo\nbar".to_string()), mk_object(&[ - ("c".into_string(), mk_object(&[("d".into_string(), Null)])) + ("c".to_string(), mk_object(&[("d".to_string(), Null)])) ]) ])) ])); @@ -3028,13 +3034,13 @@ mod tests { v, Outer { inner: vec![ - Inner { a: (), b: 2, c: vec!["abc".into_string(), "xyz".into_string()] } + Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] } ] } ); } - #[deriving(Decodable)] + #[deriving(RustcDecodable)] struct FloatStruct { f: f64, a: Vec @@ -3054,7 +3060,7 @@ mod tests { assert_eq!(value, None); let value: Option = super::decode("\"jodhpurs\"").unwrap(); - assert_eq!(value, Some("jodhpurs".into_string())); + assert_eq!(value, Some("jodhpurs".to_string())); } #[test] @@ -3064,7 +3070,7 @@ mod tests { let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"; let value: Animal = super::decode(s).unwrap(); - assert_eq!(value, Frog("Henry".into_string(), 349)); + assert_eq!(value, Frog("Henry".to_string(), 349)); } #[test] @@ -3073,8 +3079,8 @@ mod tests { \"fields\":[\"Henry\", 349]}}"; let mut map: BTreeMap = super::decode(s).unwrap(); - assert_eq!(map.remove(&"a".into_string()), Some(Dog)); - assert_eq!(map.remove(&"b".into_string()), Some(Frog("Henry".into_string(), 349))); + assert_eq!(map.remove(&"a".to_string()), Some(Dog)); + assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349))); } #[test] @@ -3083,7 +3089,7 @@ mod tests { Err(SyntaxError(EOFWhileParsingObject, 3u, 8u))); } - #[deriving(Decodable)] + 
#[deriving(RustcDecodable)] #[allow(dead_code)] struct DecodeStruct { x: f64, @@ -3091,7 +3097,7 @@ mod tests { z: string::String, w: Vec } - #[deriving(Decodable)] + #[deriving(RustcDecodable)] enum DecodeEnum { A(f64), B(string::String) @@ -3114,30 +3120,30 @@ mod tests { } #[test] fn test_decode_errors_struct() { - check_err::("[]", ExpectedError("Object".into_string(), "[]".into_string())); + check_err::("[]", ExpectedError("Object".to_string(), "[]".to_string())); check_err::("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}", - ExpectedError("Number".into_string(), "true".into_string())); + ExpectedError("Number".to_string(), "true".to_string())); check_err::("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}", - ExpectedError("Boolean".into_string(), "[]".into_string())); + ExpectedError("Boolean".to_string(), "[]".to_string())); check_err::("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}", - ExpectedError("String".into_string(), "{}".into_string())); + ExpectedError("String".to_string(), "{}".to_string())); check_err::("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}", - ExpectedError("Array".into_string(), "null".into_string())); + ExpectedError("Array".to_string(), "null".to_string())); check_err::("{\"x\": 1, \"y\": true, \"z\": \"\"}", - MissingFieldError("w".into_string())); + MissingFieldError("w".to_string())); } #[test] fn test_decode_errors_enum() { check_err::("{}", - MissingFieldError("variant".into_string())); + MissingFieldError("variant".to_string())); check_err::("{\"variant\": 1}", - ExpectedError("String".into_string(), "1".into_string())); + ExpectedError("String".to_string(), "1".to_string())); check_err::("{\"variant\": \"A\"}", - MissingFieldError("fields".into_string())); + MissingFieldError("fields".to_string())); check_err::("{\"variant\": \"A\", \"fields\": null}", - ExpectedError("Array".into_string(), "null".into_string())); + ExpectedError("Array".to_string(), "null".to_string())); check_err::("{\"variant\": \"C\", \"fields\": []}", - UnknownVariantError("C".into_string())); + UnknownVariantError("C".to_string())); } #[test] @@ -3350,15 +3356,15 @@ mod tests { let mut tree = BTreeMap::new(); - tree.insert("hello".into_string(), String("guten tag".into_string())); - tree.insert("goodbye".into_string(), String("sayonara".into_string())); + tree.insert("hello".to_string(), String("guten tag".to_string())); + tree.insert("goodbye".to_string(), String("sayonara".to_string())); let json = Array( // The following layout below should look a lot like // the pretty-printed JSON (indent * x) vec! 
( // 0x - String("greetings".into_string()), // 1x + String("greetings".to_string()), // 1x Object(tree), // 1x + 2x + 2x + 1x ) // 0x // End JSON array (7 lines) @@ -3422,7 +3428,7 @@ mod tests { }; let mut decoder = Decoder::new(json_obj); let result: Result, DecoderError> = Decodable::decode(&mut decoder); - assert_eq!(result, Err(ExpectedError("Number".into_string(), "a".into_string()))); + assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string()))); } fn assert_stream_equal(src: &str, @@ -3449,7 +3455,7 @@ mod tests { r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#, vec![ (ObjectStart, vec![]), - (StringValue("bar".into_string()), vec![Key("foo")]), + (StringValue("bar".to_string()), vec![Key("foo")]), (ArrayStart, vec![Key("array")]), (U64Value(0), vec![Key("array"), Index(0)]), (U64Value(1), vec![Key("array"), Index(1)]), @@ -3540,7 +3546,7 @@ mod tests { (F64Value(1.0), vec![Key("a")]), (ArrayStart, vec![Key("b")]), (BooleanValue(true), vec![Key("b"), Index(0)]), - (StringValue("foo\nbar".into_string()), vec![Key("b"), Index(1)]), + (StringValue("foo\nbar".to_string()), vec![Key("b"), Index(1)]), (ObjectStart, vec![Key("b"), Index(2)]), (ObjectStart, vec![Key("b"), Index(2), Key("c")]), (NullValue, vec![Key("b"), Index(2), Key("c"), Key("d")]), @@ -3673,7 +3679,7 @@ mod tests { assert!(stack.last_is_index()); assert!(stack.get(0) == Index(1)); - stack.push_key("foo".into_string()); + stack.push_key("foo".to_string()); assert!(stack.len() == 2); assert!(stack.is_equal_to(&[Index(1), Key("foo")])); @@ -3685,7 +3691,7 @@ mod tests { assert!(stack.get(0) == Index(1)); assert!(stack.get(1) == Key("foo")); - stack.push_key("bar".into_string()); + stack.push_key("bar".to_string()); assert!(stack.len() == 3); assert!(stack.is_equal_to(&[Index(1), Key("foo"), Key("bar")])); @@ -3746,8 +3752,8 @@ mod tests { assert_eq!(f64::NAN.to_json(), Null); assert_eq!(true.to_json(), Boolean(true)); assert_eq!(false.to_json(), Boolean(false)); - assert_eq!("abc".to_json(), String("abc".into_string())); - assert_eq!("abc".into_string().to_json(), String("abc".into_string())); + assert_eq!("abc".to_json(), String("abc".to_string())); + assert_eq!("abc".to_string().to_json(), String("abc".to_string())); assert_eq!((1u, 2u).to_json(), array2); assert_eq!((1u, 2u, 3u).to_json(), array3); assert_eq!([1u, 2].to_json(), array2); @@ -3759,8 +3765,8 @@ mod tests { tree_map.insert("b".into_string(), 2); assert_eq!(tree_map.to_json(), object); let mut hash_map = HashMap::new(); - hash_map.insert("a".into_string(), 1u); - hash_map.insert("b".into_string(), 2); + hash_map.insert("a".to_string(), 1u); + hash_map.insert("b".to_string(), 2); assert_eq!(hash_map.to_json(), object); assert_eq!(Some(15i).to_json(), I64(15)); assert_eq!(Some(15u).to_json(), U64(15)); @@ -3803,7 +3809,7 @@ mod tests { } fn big_json() -> string::String { - let mut src = "[\n".into_string(); + let mut src = "[\n".to_string(); for _ in range(0i, 500) { src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \ [1,2,3]},"#); diff --git a/src/libserialize/lib.rs b/src/libserialize/lib.rs index e700d102fefda..4a2bbbeec03a4 100644 --- a/src/libserialize/lib.rs +++ b/src/libserialize/lib.rs @@ -15,7 +15,7 @@ Core encoding and decoding interfaces. 
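Stepping back to the `escape_bytes` hunk earlier in this json.rs diff: the enlarged match means every control byte below 0x20, plus 0x7f, is now written as a `\u00XX` escape instead of being passed through raw, while the short forms `\b`, `\t`, `\n`, `\f`, `\r` are kept for the bytes JSON defines them for. A standalone sketch of that rule (not the patch's exact code; the helper name is made up):

    fn escape_byte(b: u8) -> Option<String> {
        match b {
            b'"'    => Some("\\\"".to_string()),
            b'\\'   => Some("\\\\".to_string()),
            b'\x08' => Some("\\b".to_string()),
            b'\t'   => Some("\\t".to_string()),
            b'\n'   => Some("\\n".to_string()),
            b'\x0c' => Some("\\f".to_string()),
            b'\r'   => Some("\\r".to_string()),
            0x00..=0x1f | 0x7f => Some(format!("\\u{:04x}", b)), // remaining control bytes
            _ => None, // everything else is emitted verbatim
        }
    }

    fn main() {
        assert_eq!(escape_byte(0x1b).as_deref(), Some("\\u001b"));
        assert_eq!(escape_byte(b'\n').as_deref(), Some("\\n"));
        assert_eq!(escape_byte(b'a'), None);
    }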
*/ #![crate_name = "serialize"] -#![experimental] +#![unstable = "deprecated in favor of rustc-serialize on crates.io"] #![crate_type = "rlib"] #![crate_type = "dylib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", @@ -32,6 +32,7 @@ extern crate test; #[phase(plugin, link)] extern crate log; +extern crate unicode; extern crate collections; @@ -44,3 +45,7 @@ mod collection_impls; pub mod base64; pub mod hex; pub mod json; + +mod rustc_serialize { + pub use serialize::*; +} diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index 00c5158309e98..558f9e603e159 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -308,13 +308,13 @@ impl> Encodable for str { impl> Encodable for String { fn encode(&self, s: &mut S) -> Result<(), E> { - s.emit_str(self.as_slice()) + s.emit_str(self[]) } } impl> Decodable for String { fn decode(d: &mut D) -> Result { - Ok(String::from_str(try!(d.read_str()).as_slice())) + d.read_str() } } diff --git a/src/libstd/ascii.rs b/src/libstd/ascii.rs index 2c4dc5313bbfa..2c49beca98de6 100644 --- a/src/libstd/ascii.rs +++ b/src/libstd/ascii.rs @@ -23,7 +23,7 @@ use ops::FnMut; use option::Option; use option::Option::{Some, None}; use slice::{SliceExt, AsSlice}; -use str::{Str, StrPrelude}; +use str::{Str, StrExt}; use string::{String, IntoString}; use vec::Vec; @@ -633,7 +633,6 @@ mod tests { use prelude::*; use super::*; use char::from_u32; - use str::StrPrelude; macro_rules! v2ascii { ( [$($e:expr),*]) => (&[$(Ascii{chr:$e}),*]); diff --git a/src/libstd/bitflags.rs b/src/libstd/bitflags.rs index f467b77dbf4cf..5dd76047779a0 100644 --- a/src/libstd/bitflags.rs +++ b/src/libstd/bitflags.rs @@ -24,9 +24,9 @@ /// ```{.rust} /// bitflags! { /// flags Flags: u32 { -/// const FLAG_A = 0x00000001, -/// const FLAG_B = 0x00000010, -/// const FLAG_C = 0x00000100, +/// const FLAG_A = 0b00000001, +/// const FLAG_B = 0b00000010, +/// const FLAG_C = 0b00000100, /// const FLAG_ABC = FLAG_A.bits /// | FLAG_B.bits /// | FLAG_C.bits, @@ -50,8 +50,8 @@ /// /// bitflags! { /// flags Flags: u32 { -/// const FLAG_A = 0x00000001, -/// const FLAG_B = 0x00000010, +/// const FLAG_A = 0b00000001, +/// const FLAG_B = 0b00000010, /// } /// } /// @@ -205,17 +205,6 @@ macro_rules! bitflags { } } - // NOTE(stage0): Remove impl after a snapshot - #[cfg(stage0)] - impl BitOr<$BitFlags, $BitFlags> for $BitFlags { - /// Returns the union of the two sets of flags. - #[inline] - fn bitor(&self, other: &$BitFlags) -> $BitFlags { - $BitFlags { bits: self.bits | other.bits } - } - } - - #[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl BitOr<$BitFlags, $BitFlags> for $BitFlags { /// Returns the union of the two sets of flags. #[inline] @@ -224,17 +213,6 @@ macro_rules! bitflags { } } - // NOTE(stage0): Remove impl after a snapshot - #[cfg(stage0)] - impl BitXor<$BitFlags, $BitFlags> for $BitFlags { - /// Returns the left flags, but with all the right flags toggled. - #[inline] - fn bitxor(&self, other: &$BitFlags) -> $BitFlags { - $BitFlags { bits: self.bits ^ other.bits } - } - } - - #[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl BitXor<$BitFlags, $BitFlags> for $BitFlags { /// Returns the left flags, but with all the right flags toggled. #[inline] @@ -243,17 +221,6 @@ macro_rules! bitflags { } } - // NOTE(stage0): Remove impl after a snapshot - #[cfg(stage0)] - impl BitAnd<$BitFlags, $BitFlags> for $BitFlags { - /// Returns the intersection between the two sets of flags. 
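The bitflags! documentation and tests above switch their flag constants from hexadecimal to binary literals, and the expected values change with them. The old hex forms were single bits too, just non-adjacent ones (0x10 is decimal 16, i.e. bit 4); the binary spelling makes the intended bit positions obvious at a glance. A quick check of what each literal denotes:

    fn main() {
        assert_eq!(0x00000010u32, 16); // old FLAG_B: bit 4
        assert_eq!(0b00000010u32, 2);  // new FLAG_B: bit 1
        assert_eq!(0b0000_0001 | 0b0000_0010 | 0b0000_0100, 0b0000_0111); // FLAG_ABC as contiguous bits
    }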
- #[inline] - fn bitand(&self, other: &$BitFlags) -> $BitFlags { - $BitFlags { bits: self.bits & other.bits } - } - } - - #[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl BitAnd<$BitFlags, $BitFlags> for $BitFlags { /// Returns the intersection between the two sets of flags. #[inline] @@ -262,17 +229,6 @@ macro_rules! bitflags { } } - // NOTE(stage0): Remove impl after a snapshot - #[cfg(stage0)] - impl Sub<$BitFlags, $BitFlags> for $BitFlags { - /// Returns the set difference of the two sets of flags. - #[inline] - fn sub(&self, other: &$BitFlags) -> $BitFlags { - $BitFlags { bits: self.bits & !other.bits } - } - } - - #[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Sub<$BitFlags, $BitFlags> for $BitFlags { /// Returns the set difference of the two sets of flags. #[inline] @@ -315,7 +271,6 @@ macro_rules! bitflags { #[cfg(test)] #[allow(non_upper_case_globals)] mod tests { - use kinds::Copy; use hash; use option::Option::{Some, None}; use ops::{BitOr, BitAnd, BitXor, Sub, Not}; @@ -326,10 +281,10 @@ mod tests { #[doc = "> "] #[doc = "> - Richard Feynman"] flags Flags: u32 { - const FlagA = 0x00000001, + const FlagA = 0b00000001, #[doc = " macros are way better at generating code than trans is"] - const FlagB = 0x00000010, - const FlagC = 0x00000100, + const FlagB = 0b00000010, + const FlagC = 0b00000100, #[doc = "* cmr bed"] #[doc = "* strcat table"] #[doc = " wait what?"] @@ -347,21 +302,21 @@ mod tests { #[test] fn test_bits(){ - assert_eq!(Flags::empty().bits(), 0x00000000); - assert_eq!(FlagA.bits(), 0x00000001); - assert_eq!(FlagABC.bits(), 0x00000111); + assert_eq!(Flags::empty().bits(), 0b00000000); + assert_eq!(FlagA.bits(), 0b00000001); + assert_eq!(FlagABC.bits(), 0b00000111); - assert_eq!(AnotherSetOfFlags::empty().bits(), 0x00); + assert_eq!(AnotherSetOfFlags::empty().bits(), 0b00); assert_eq!(AnotherFlag.bits(), !0_i8); } #[test] fn test_from_bits() { assert!(Flags::from_bits(0) == Some(Flags::empty())); - assert!(Flags::from_bits(0x1) == Some(FlagA)); - assert!(Flags::from_bits(0x10) == Some(FlagB)); - assert!(Flags::from_bits(0x11) == Some(FlagA | FlagB)); - assert!(Flags::from_bits(0x1000) == None); + assert!(Flags::from_bits(0b1) == Some(FlagA)); + assert!(Flags::from_bits(0b10) == Some(FlagB)); + assert!(Flags::from_bits(0b11) == Some(FlagA | FlagB)); + assert!(Flags::from_bits(0b1000) == None); assert!(AnotherSetOfFlags::from_bits(!0_i8) == Some(AnotherFlag)); } @@ -369,11 +324,11 @@ mod tests { #[test] fn test_from_bits_truncate() { assert!(Flags::from_bits_truncate(0) == Flags::empty()); - assert!(Flags::from_bits_truncate(0x1) == FlagA); - assert!(Flags::from_bits_truncate(0x10) == FlagB); - assert!(Flags::from_bits_truncate(0x11) == (FlagA | FlagB)); - assert!(Flags::from_bits_truncate(0x1000) == Flags::empty()); - assert!(Flags::from_bits_truncate(0x1001) == FlagA); + assert!(Flags::from_bits_truncate(0b1) == FlagA); + assert!(Flags::from_bits_truncate(0b10) == FlagB); + assert!(Flags::from_bits_truncate(0b11) == (FlagA | FlagB)); + assert!(Flags::from_bits_truncate(0b1000) == Flags::empty()); + assert!(Flags::from_bits_truncate(0b1001) == FlagA); assert!(AnotherSetOfFlags::from_bits_truncate(0_i8) == AnotherSetOfFlags::empty()); } diff --git a/src/libcore/bool.rs b/src/libstd/bool.rs similarity index 80% rename from src/libcore/bool.rs rename to src/libstd/bool.rs index 9d2ea816fdfd4..bbaab5ee3db8f 100644 --- a/src/libcore/bool.rs +++ b/src/libstd/bool.rs @@ -11,6 +11,5 @@ //! 
The boolean type #![doc(primitive = "bool")] -#![unstable = "this module is purely for documentation and it will likely be \ - removed from the public api"] +#![stable] diff --git a/src/libstd/c_str.rs b/src/libstd/c_str.rs index 8fe3642e702d4..fb44961017fcd 100644 --- a/src/libstd/c_str.rs +++ b/src/libstd/c_str.rs @@ -67,18 +67,18 @@ //! } //! ``` -use string::String; -use hash; +use core::prelude::*; +use libc; + use fmt; +use hash; use kinds::marker; use mem; -use core::prelude::*; - use ptr; -use raw::Slice; -use slice; +use slice::{mod, ImmutableIntSlice}; use str; -use libc; +use string::String; + /// The representation of a C String. /// @@ -210,7 +210,7 @@ impl CString { #[inline] pub fn as_bytes<'a>(&'a self) -> &'a [u8] { unsafe { - mem::transmute(Slice { data: self.buf, len: self.len() + 1 }) + slice::from_raw_buf(&self.buf, self.len() + 1).as_unsigned() } } @@ -219,7 +219,7 @@ impl CString { #[inline] pub fn as_bytes_no_nul<'a>(&'a self) -> &'a [u8] { unsafe { - mem::transmute(Slice { data: self.buf, len: self.len() }) + slice::from_raw_buf(&self.buf, self.len()).as_unsigned() } } @@ -228,7 +228,7 @@ impl CString { #[inline] pub fn as_str<'a>(&'a self) -> Option<&'a str> { let buf = self.as_bytes_no_nul(); - str::from_utf8(buf) + str::from_utf8(buf).ok() } /// Return a CString iterator. diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs index 6bfea7e3cb2a8..d749cd77cef2d 100644 --- a/src/libstd/collections/hash/map.rs +++ b/src/libstd/collections/hash/map.rs @@ -10,7 +10,7 @@ // // ignore-lexer-test FIXME #15883 -pub use self::Entry::*; +use self::Entry::*; use self::SearchResult::*; use self::VacantEntryState::*; @@ -30,18 +30,20 @@ use option::Option::{Some, None}; use result::Result; use result::Result::{Ok, Err}; -use super::table; use super::table::{ + mod, Bucket, - Empty, EmptyBucket, - Full, FullBucket, FullBucketImm, FullBucketMut, RawTable, SafeHash }; +use super::table::BucketState::{ + Empty, + Full, +}; const INITIAL_LOG2_CAP: uint = 5; pub const INITIAL_CAPACITY: uint = 1 << INITIAL_LOG2_CAP; // 2^5 @@ -379,7 +381,7 @@ fn robin_hood<'a, K: 'a, V: 'a>(mut bucket: FullBucketMut<'a, K, V>, assert!(probe.index() != idx_end); let full_bucket = match probe.peek() { - table::Empty(bucket) => { + Empty(bucket) => { // Found a hole! 
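The `as_str` change above (`str::from_utf8(buf).ok()`) tracks `from_utf8` switching from an `Option` to a `Result` that reports why validation failed; callers that only care about success recover the old shape with `.ok()`. In current Rust the same pattern looks like:

```
use std::str;

fn main() {
    let good: &[u8] = b"hello";
    let bad: &[u8] = &[0xff, 0xfe];

    // `from_utf8` now says *why* validation failed...
    let ok: Result<&str, str::Utf8Error> = str::from_utf8(good);
    assert_eq!(ok, Ok("hello"));
    assert!(str::from_utf8(bad).is_err());

    // ...and `.ok()` recovers the Option-shaped API when the cause is irrelevant.
    assert_eq!(str::from_utf8(good).ok(), Some("hello"));
    assert_eq!(str::from_utf8(bad).ok(), None);
}
```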
let b = bucket.put(old_hash, old_key, old_val); // Now that it's stolen, just read the value's pointer @@ -390,7 +392,7 @@ fn robin_hood<'a, K: 'a, V: 'a>(mut bucket: FullBucketMut<'a, K, V>, .into_mut_refs() .1; }, - table::Full(bucket) => bucket + Full(bucket) => bucket }; let probe_ib = full_bucket.index() - full_bucket.distance(); @@ -836,8 +838,9 @@ impl, V, S, H: Hasher> HashMap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn keys(&self) -> Keys { + pub fn keys<'a>(&'a self) -> Keys<'a, K, V> { fn first((a, _): (A, B)) -> A { a } + let first: fn((&'a K,&'a V)) -> &'a K = first; // coerce to fn ptr Keys { inner: self.iter().map(first) } } @@ -860,8 +863,9 @@ impl, V, S, H: Hasher> HashMap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn values(&self) -> Values { + pub fn values<'a>(&'a self) -> Values<'a, K, V> { fn second((_, b): (A, B)) -> B { b } + let second: fn((&'a K,&'a V)) -> &'a V = second; // coerce to fn ptr Values { inner: self.iter().map(second) } } @@ -912,8 +916,8 @@ impl, V, S, H: Hasher> HashMap { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter_mut(&mut self) -> MutEntries { - MutEntries { inner: self.table.iter_mut() } + pub fn iter_mut(&mut self) -> IterMut { + IterMut { inner: self.table.iter_mut() } } /// Creates a consuming iterator, that is, one that moves each key-value @@ -934,10 +938,11 @@ impl, V, S, H: Hasher> HashMap { /// let vec: Vec<(&str, int)> = map.into_iter().collect(); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> MoveEntries { + pub fn into_iter(self) -> IntoIter { fn last_two((_, b, c): (A, B, C)) -> (B, C) { (b, c) } + let last_two: fn((SafeHash, K, V)) -> (K, V) = last_two; - MoveEntries { + IntoIter { inner: self.table.into_iter().map(last_two) } } @@ -982,6 +987,36 @@ impl, V, S, H: Hasher> HashMap { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_empty(&self) -> bool { self.len() == 0 } + /// Clears the map, returning all key-value pairs as an iterator. Keeps the + /// allocated memory for reuse. + /// + /// # Example + /// + /// ``` + /// use std::collections::HashMap; + /// + /// let mut a = HashMap::new(); + /// a.insert(1u, "a"); + /// a.insert(2u, "b"); + /// + /// for (k, v) in a.drain().take(1) { + /// assert!(k == 1 || k == 2); + /// assert!(v == "a" || v == "b"); + /// } + /// + /// assert!(a.is_empty()); + /// ``` + #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn drain(&mut self) -> Drain { + fn last_two((_, b, c): (A, B, C)) -> (B, C) { (b, c) } + let last_two: fn((SafeHash, K, V)) -> (K, V) = last_two; // coerce to fn pointer + + Drain { + inner: self.table.drain().map(last_two), + } + } + /// Clears the map, removing all key-value pairs. Keeps the allocated memory /// for reuse. /// @@ -996,16 +1031,9 @@ impl, V, S, H: Hasher> HashMap { /// assert!(a.is_empty()); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] + #[inline] pub fn clear(&mut self) { - let cap = self.table.capacity(); - let mut buckets = Bucket::first(&mut self.table); - - while buckets.index() != cap { - buckets = match buckets.peek() { - Empty(b) => b.next(), - Full(full) => full.take().0.next(), - }; - } + self.drain(); } /// Deprecated: Renamed to `get`. 
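The `clear` rewrite above is worth a second look: instead of walking every bucket by hand it now just calls `self.drain();`, building the drain iterator and dropping it immediately, and the iterator's destructor removes whatever was never yielded. The observable behaviour, shown here with today's `HashMap`, which kept the same semantics:

```
use std::collections::HashMap;

fn main() {
    let mut map = HashMap::new();
    map.insert(1, "a");
    map.insert(2, "b");

    // Dropping the Drain iterator without iterating still empties the map:
    // its destructor removes the remaining entries, so `clear` can simply
    // build a drain and let it fall out of scope.
    drop(map.drain());
    assert!(map.is_empty());

    // Allocated capacity is kept for reuse; only the entries are gone.
    map.insert(3, "c");
    assert_eq!(map.len(), 1);
}
```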
@@ -1282,16 +1310,16 @@ pub struct Entries<'a, K: 'a, V: 'a> { } /// HashMap mutable values iterator -pub struct MutEntries<'a, K: 'a, V: 'a> { - inner: table::MutEntries<'a, K, V> +pub struct IterMut<'a, K: 'a, V: 'a> { + inner: table::IterMut<'a, K, V> } /// HashMap move iterator -pub struct MoveEntries { +pub struct IntoIter { inner: iter::Map< (SafeHash, K, V), (K, V), - table::MoveEntries, + table::IntoIter, fn((SafeHash, K, V)) -> (K, V), > } @@ -1306,6 +1334,16 @@ pub struct Values<'a, K: 'a, V: 'a> { inner: Map<(&'a K, &'a V), &'a V, Entries<'a, K, V>, fn((&'a K, &'a V)) -> &'a V> } +/// HashMap drain iterator +pub struct Drain<'a, K: 'a, V: 'a> { + inner: iter::Map< + (SafeHash, K, V), + (K, V), + table::Drain<'a, K, V>, + fn((SafeHash, K, V)) -> (K, V), + > +} + /// A view into a single occupied location in a HashMap pub struct OccupiedEntry<'a, K:'a, V:'a> { elem: FullBucket>, @@ -1340,12 +1378,12 @@ impl<'a, K, V> Iterator<(&'a K, &'a V)> for Entries<'a, K, V> { #[inline] fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } } -impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for MutEntries<'a, K, V> { +impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for IterMut<'a, K, V> { #[inline] fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next() } #[inline] fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } } -impl Iterator<(K, V)> for MoveEntries { +impl Iterator<(K, V)> for IntoIter { #[inline] fn next(&mut self) -> Option<(K, V)> { self.inner.next() } #[inline] fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } } @@ -1360,6 +1398,17 @@ impl<'a, K, V> Iterator<&'a V> for Values<'a, K, V> { #[inline] fn size_hint(&self) -> (uint, Option) { self.inner.size_hint() } } +impl<'a, K: 'a, V: 'a> Iterator<(K, V)> for Drain<'a, K, V> { + #[inline] + fn next(&mut self) -> Option<(K, V)> { + self.inner.next() + } + #[inline] + fn size_hint(&self) -> (uint, Option) { + self.inner.size_hint() + } +} + impl<'a, K, V> OccupiedEntry<'a, K, V> { /// Gets a reference to the value in the entry pub fn get(&self) -> &V { @@ -1427,10 +1476,9 @@ mod test_map { use prelude::*; use super::HashMap; - use super::{Occupied, Vacant}; - use cmp::Equiv; + use super::Entry::{Occupied, Vacant}; use hash; - use iter::{Iterator,range_inclusive,range_step_inclusive}; + use iter::{range_inclusive, range_step_inclusive}; use cell::RefCell; use rand::{weak_rng, Rng}; diff --git a/src/libstd/collections/hash/set.rs b/src/libstd/collections/hash/set.rs index 67c0f887832fa..6d83d5510b357 100644 --- a/src/libstd/collections/hash/set.rs +++ b/src/libstd/collections/hash/set.rs @@ -11,20 +11,19 @@ // ignore-lexer-test FIXME #15883 use borrow::BorrowFrom; +use clone::Clone; use cmp::{Eq, Equiv, PartialEq}; use core::kinds::Sized; use default::Default; use fmt::Show; use fmt; use hash::{Hash, Hasher, RandomSipHasher}; -use iter::{Iterator, IteratorExt, FromIterator, Map, FilterMap, Chain, Repeat, Zip, Extend, repeat}; +use iter::{Iterator, IteratorExt, IteratorCloneExt, FromIterator, Map, Chain, Extend}; +use ops::{BitOr, BitAnd, BitXor, Sub}; use option::Option::{Some, None, mod}; use result::Result::{Ok, Err}; -use super::map::{HashMap, MoveEntries, Keys, INITIAL_CAPACITY}; - -// FIXME(conventions): implement BitOr, BitAnd, BitXor, and Sub - +use super::map::{mod, HashMap, Keys, INITIAL_CAPACITY}; // Future Optimization (FIXME!) 
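Several methods above (`keys`, `values`, `into_iter`, `drain`) gain a line such as `let first: fn(...) -> ... = first; // coerce to fn ptr`. The reason is that the returned iterator structs spell out their adapter types, e.g. `Map<..., fn((SafeHash, K, V)) -> (K, V)>`, and a bare fn item has an unnameable type of its own, so it is coerced to a function pointer before being handed to `map`. A compact illustration in current Rust syntax (the `Keys` wrapper and `first` helper here are illustrative):

```
use std::collections::{hash_map, HashMap};
use std::iter::Map;

// A wrapper whose field type must be written out, so the mapping function
// has to have a nameable type: a `fn` pointer rather than a closure or fn item.
struct Keys<'a, K: 'a, V: 'a> {
    inner: Map<hash_map::Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a K>,
}

fn keys<'a, K, V>(map: &'a HashMap<K, V>) -> Keys<'a, K, V> {
    fn first<A, B>((a, _): (A, B)) -> A { a }
    // Coerce the fn item to the fn-pointer type spelled in the struct field.
    let first: fn((&'a K, &'a V)) -> &'a K = first;
    Keys { inner: map.iter().map(first) }
}

fn main() {
    let mut m = HashMap::new();
    m.insert("x", 1);
    let ks = keys(&m);
    assert_eq!(ks.inner.count(), 1);
}
```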
// ============================= @@ -250,8 +249,8 @@ impl, S, H: Hasher> HashSet { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn iter<'a>(&'a self) -> SetItems<'a, T> { - SetItems { iter: self.map.keys() } + pub fn iter<'a>(&'a self) -> Iter<'a, T> { + Iter { iter: self.map.keys() } } /// Creates a consuming iterator, that is, one that moves each value out @@ -275,10 +274,11 @@ impl, S, H: Hasher> HashSet { /// } /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn into_iter(self) -> SetMoveItems { + pub fn into_iter(self) -> IntoIter { fn first((a, _): (A, B)) -> A { a } + let first: fn((T, ())) -> T = first; - SetMoveItems { iter: self.map.into_iter().map(first) } + IntoIter { iter: self.map.into_iter().map(first) } } /// Visit the values representing the difference. @@ -304,14 +304,11 @@ impl, S, H: Hasher> HashSet { /// assert_eq!(diff, [4i].iter().map(|&x| x).collect()); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn difference<'a>(&'a self, other: &'a HashSet) -> SetAlgebraItems<'a, T, H> { - fn filter<'a, T, S, H>((other, elt): (&HashSet, &'a T)) -> Option<&'a T> where - T: Eq + Hash, H: Hasher - { - if !other.contains(elt) { Some(elt) } else { None } + pub fn difference<'a>(&'a self, other: &'a HashSet) -> Difference<'a, T, H> { + Difference { + iter: self.iter(), + other: other, } - - SetAlgebraItems { iter: repeat(other).zip(self.iter()).filter_map(filter) } } /// Visit the values representing the symmetric difference. @@ -336,8 +333,8 @@ impl, S, H: Hasher> HashSet { /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn symmetric_difference<'a>(&'a self, other: &'a HashSet) - -> SymDifferenceItems<'a, T, H> { - SymDifferenceItems { iter: self.difference(other).chain(other.difference(self)) } + -> SymmetricDifference<'a, T, H> { + SymmetricDifference { iter: self.difference(other).chain(other.difference(self)) } } /// Visit the values representing the intersection. @@ -358,14 +355,11 @@ impl, S, H: Hasher> HashSet { /// assert_eq!(diff, [2i, 3].iter().map(|&x| x).collect()); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn intersection<'a>(&'a self, other: &'a HashSet) -> SetAlgebraItems<'a, T, H> { - fn filter<'a, T, S, H>((other, elt): (&HashSet, &'a T)) -> Option<&'a T> where - T: Eq + Hash, H: Hasher - { - if other.contains(elt) { Some(elt) } else { None } + pub fn intersection<'a>(&'a self, other: &'a HashSet) -> Intersection<'a, T, H> { + Intersection { + iter: self.iter(), + other: other, } - - SetAlgebraItems { iter: repeat(other).zip(self.iter()).filter_map(filter) } } /// Visit the values representing the union. 
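`difference` and `intersection` above stop pre-building a `Zip`/`FilterMap` pipeline and instead return small structs that hold the first set's iterator plus a borrow of the second set, filtering lazily in `next`; `size_hint` reports a lower bound of 0 because every element might be filtered away. A sketch of the same shape in current Rust (only `Difference` is shown; `Intersection` just flips the `contains` test):

```
use std::collections::{hash_set, HashSet};
use std::hash::Hash;

// Lazily yields elements of the first set that are absent from `other`.
struct Difference<'a, T: 'a> {
    iter: hash_set::Iter<'a, T>,
    other: &'a HashSet<T>,
}

impl<'a, T: Eq + Hash> Iterator for Difference<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        loop {
            let elt = self.iter.next()?;
            if !self.other.contains(elt) {
                return Some(elt);
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Every remaining element might be filtered out, so the lower bound is 0.
        let (_, upper) = self.iter.size_hint();
        (0, upper)
    }
}

fn main() {
    let a: HashSet<i32> = [1, 2, 3, 4].into_iter().collect();
    let b: HashSet<i32> = [3, 4].into_iter().collect();
    let diff = Difference { iter: a.iter(), other: &b };
    let got: HashSet<&i32> = diff.collect();
    assert_eq!(got.len(), 2); // {1, 2}
}
```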
@@ -386,8 +380,8 @@ impl, S, H: Hasher> HashSet { /// assert_eq!(diff, [1i, 2, 3, 4].iter().map(|&x| x).collect()); /// ``` #[unstable = "matches collection reform specification, waiting for dust to settle"] - pub fn union<'a>(&'a self, other: &'a HashSet) -> UnionItems<'a, T, H> { - UnionItems { iter: self.iter().chain(other.difference(self)) } + pub fn union<'a>(&'a self, other: &'a HashSet) -> Union<'a, T, H> { + Union { iter: self.iter().chain(other.difference(self)) } } /// Return the number of elements in the set @@ -420,6 +414,16 @@ impl, S, H: Hasher> HashSet { #[unstable = "matches collection reform specification, waiting for dust to settle"] pub fn is_empty(&self) -> bool { self.map.len() == 0 } + /// Clears the set, returning all elements in an iterator. + #[inline] + #[unstable = "matches collection reform specification, waiting for dust to settle"] + pub fn drain(&mut self) -> Drain { + fn first((a, _): (A, B)) -> A { a } + let first: fn((T, ())) -> T = first; // coerce to fn pointer + + Drain { iter: self.map.drain().map(first) } + } + /// Clears the set, removing all values. /// /// # Example @@ -616,59 +620,224 @@ impl, S, H: Hasher + Default> Default for HashSet { } } +#[unstable = "matches collection reform specification, waiting for dust to settle"] +impl<'a, 'b, T: Eq + Hash + Clone, S, H: Hasher + Default> +BitOr<&'b HashSet, HashSet> for &'a HashSet { + /// Returns the union of `self` and `rhs` as a new `HashSet`. + /// + /// # Examples + /// + /// ``` + /// use std::collections::HashSet; + /// + /// let a: HashSet = vec![1, 2, 3].into_iter().collect(); + /// let b: HashSet = vec![3, 4, 5].into_iter().collect(); + /// + /// let set: HashSet = &a | &b; + /// + /// let mut i = 0; + /// let expected = [1, 2, 3, 4, 5]; + /// for x in set.iter() { + /// assert!(expected.contains(x)); + /// i += 1; + /// } + /// assert_eq!(i, expected.len()); + /// ``` + fn bitor(self, rhs: &HashSet) -> HashSet { + self.union(rhs).cloned().collect() + } +} + +#[unstable = "matches collection reform specification, waiting for dust to settle"] +impl<'a, 'b, T: Eq + Hash + Clone, S, H: Hasher + Default> +BitAnd<&'b HashSet, HashSet> for &'a HashSet { + /// Returns the intersection of `self` and `rhs` as a new `HashSet`. + /// + /// # Examples + /// + /// ``` + /// use std::collections::HashSet; + /// + /// let a: HashSet = vec![1, 2, 3].into_iter().collect(); + /// let b: HashSet = vec![2, 3, 4].into_iter().collect(); + /// + /// let set: HashSet = &a & &b; + /// + /// let mut i = 0; + /// let expected = [2, 3]; + /// for x in set.iter() { + /// assert!(expected.contains(x)); + /// i += 1; + /// } + /// assert_eq!(i, expected.len()); + /// ``` + fn bitand(self, rhs: &HashSet) -> HashSet { + self.intersection(rhs).cloned().collect() + } +} + +#[unstable = "matches collection reform specification, waiting for dust to settle"] +impl<'a, 'b, T: Eq + Hash + Clone, S, H: Hasher + Default> +BitXor<&'b HashSet, HashSet> for &'a HashSet { + /// Returns the symmetric difference of `self` and `rhs` as a new `HashSet`. 
+ /// + /// # Examples + /// + /// ``` + /// use std::collections::HashSet; + /// + /// let a: HashSet = vec![1, 2, 3].into_iter().collect(); + /// let b: HashSet = vec![3, 4, 5].into_iter().collect(); + /// + /// let set: HashSet = &a ^ &b; + /// + /// let mut i = 0; + /// let expected = [1, 2, 4, 5]; + /// for x in set.iter() { + /// assert!(expected.contains(x)); + /// i += 1; + /// } + /// assert_eq!(i, expected.len()); + /// ``` + fn bitxor(self, rhs: &HashSet) -> HashSet { + self.symmetric_difference(rhs).cloned().collect() + } +} + +#[unstable = "matches collection reform specification, waiting for dust to settle"] +impl<'a, 'b, T: Eq + Hash + Clone, S, H: Hasher + Default> +Sub<&'b HashSet, HashSet> for &'a HashSet { + /// Returns the difference of `self` and `rhs` as a new `HashSet`. + /// + /// # Examples + /// + /// ``` + /// use std::collections::HashSet; + /// + /// let a: HashSet = vec![1, 2, 3].into_iter().collect(); + /// let b: HashSet = vec![3, 4, 5].into_iter().collect(); + /// + /// let set: HashSet = &a - &b; + /// + /// let mut i = 0; + /// let expected = [1, 2]; + /// for x in set.iter() { + /// assert!(expected.contains(x)); + /// i += 1; + /// } + /// assert_eq!(i, expected.len()); + /// ``` + fn sub(self, rhs: &HashSet) -> HashSet { + self.difference(rhs).cloned().collect() + } +} + /// HashSet iterator -pub struct SetItems<'a, K: 'a> { +pub struct Iter<'a, K: 'a> { iter: Keys<'a, K, ()> } /// HashSet move iterator -pub struct SetMoveItems { - iter: Map<(K, ()), K, MoveEntries, fn((K, ())) -> K> +pub struct IntoIter { + iter: Map<(K, ()), K, map::IntoIter, fn((K, ())) -> K> +} + +/// HashSet drain iterator +pub struct Drain<'a, K: 'a> { + iter: Map<(K, ()), K, map::Drain<'a, K, ()>, fn((K, ())) -> K>, +} + +/// Intersection iterator +pub struct Intersection<'a, T: 'a, H: 'a> { + // iterator of the first set + iter: Iter<'a, T>, + // the second set + other: &'a HashSet, } -// `Repeat` is used to feed the filter closure an explicit capture -// of a reference to the other set -/// Set operations iterator, used directly for intersection and difference -pub struct SetAlgebraItems<'a, T: 'a, H: 'a> { - iter: FilterMap< - (&'a HashSet, &'a T), - &'a T, - Zip>, SetItems<'a, T>>, - for<'b> fn((&HashSet, &'b T)) -> Option<&'b T>, - > +/// Difference iterator +pub struct Difference<'a, T: 'a, H: 'a> { + // iterator of the first set + iter: Iter<'a, T>, + // the second set + other: &'a HashSet, } /// Symmetric difference iterator. -pub struct SymDifferenceItems<'a, T: 'a, H: 'a> { - iter: Chain, SetAlgebraItems<'a, T, H>> +pub struct SymmetricDifference<'a, T: 'a, H: 'a> { + iter: Chain, Difference<'a, T, H>> } /// Set union iterator. 
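The four operator impls added above let two borrowed sets be combined with `|`, `&`, `^` and `-`, each allocating a new set of cloned elements while leaving the operands usable. Usage, shown with today's `std::collections::HashSet`, which kept these operators:

```
use std::collections::HashSet;

fn main() {
    let a: HashSet<i32> = [1, 2, 3].into_iter().collect();
    let b: HashSet<i32> = [3, 4, 5].into_iter().collect();

    // Each operator borrows both operands and allocates a fresh set of clones.
    let union: HashSet<i32> = &a | &b;
    let intersection: HashSet<i32> = &a & &b;
    let symmetric: HashSet<i32> = &a ^ &b;
    let difference: HashSet<i32> = &a - &b;

    let expected_union: HashSet<i32> = [1, 2, 3, 4, 5].into_iter().collect();
    let expected_difference: HashSet<i32> = [1, 2].into_iter().collect();

    assert_eq!(union, expected_union);
    assert_eq!(difference, expected_difference);
    assert_eq!(intersection.len(), 1); // {3}
    assert_eq!(symmetric.len(), 4);    // {1, 2, 4, 5}

    // The originals are untouched.
    assert_eq!((a.len(), b.len()), (3, 3));
}
```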
-pub struct UnionItems<'a, T: 'a, H: 'a> { - iter: Chain, SetAlgebraItems<'a, T, H>> +pub struct Union<'a, T: 'a, H: 'a> { + iter: Chain, Difference<'a, T, H>> } -impl<'a, K> Iterator<&'a K> for SetItems<'a, K> { +impl<'a, K> Iterator<&'a K> for Iter<'a, K> { fn next(&mut self) -> Option<&'a K> { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl Iterator for SetMoveItems { +impl Iterator for IntoIter { fn next(&mut self) -> Option { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl<'a, T, H> Iterator<&'a T> for SetAlgebraItems<'a, T, H> { - fn next(&mut self) -> Option<&'a T> { self.iter.next() } +impl<'a, K: 'a> Iterator for Drain<'a, K> { + fn next(&mut self) -> Option { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl<'a, T, H> Iterator<&'a T> for SymDifferenceItems<'a, T, H> { +impl<'a, T, S, H> Iterator<&'a T> for Intersection<'a, T, H> + where T: Eq + Hash, H: Hasher +{ + fn next(&mut self) -> Option<&'a T> { + loop { + match self.iter.next() { + None => return None, + Some(elt) => if self.other.contains(elt) { + return Some(elt) + }, + } + } + } + + fn size_hint(&self) -> (uint, Option) { + let (_, upper) = self.iter.size_hint(); + (0, upper) + } +} + +impl<'a, T, S, H> Iterator<&'a T> for Difference<'a, T, H> + where T: Eq + Hash, H: Hasher +{ + fn next(&mut self) -> Option<&'a T> { + loop { + match self.iter.next() { + None => return None, + Some(elt) => if !self.other.contains(elt) { + return Some(elt) + }, + } + } + } + + fn size_hint(&self) -> (uint, Option) { + let (_, upper) = self.iter.size_hint(); + (0, upper) + } +} + +impl<'a, T, S, H> Iterator<&'a T> for SymmetricDifference<'a, T, H> + where T: Eq + Hash, H: Hasher +{ fn next(&mut self) -> Option<&'a T> { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } -impl<'a, T, H> Iterator<&'a T> for UnionItems<'a, T, H> { +impl<'a, T, S, H> Iterator<&'a T> for Union<'a, T, H> + where T: Eq + Hash, H: Hasher +{ fn next(&mut self) -> Option<&'a T> { self.iter.next() } fn size_hint(&self) -> (uint, Option) { self.iter.size_hint() } } @@ -678,7 +847,6 @@ mod test_set { use prelude::*; use super::HashSet; - use slice::PartialEqSliceExt; #[test] fn test_disjoint() { @@ -914,4 +1082,41 @@ mod test_set { assert!(set_str == "{1, 2}" || set_str == "{2, 1}"); assert_eq!(format!("{}", empty), "{}"); } + + #[test] + fn test_trivial_drain() { + let mut s = HashSet::::new(); + for _ in s.drain() {} + assert!(s.is_empty()); + drop(s); + + let mut s = HashSet::::new(); + drop(s.drain()); + assert!(s.is_empty()); + } + + #[test] + fn test_drain() { + let mut s: HashSet = range(1, 100).collect(); + + // try this a bunch of times to make sure we don't screw up internal state. + for _ in range(0i, 20) { + assert_eq!(s.len(), 99); + + { + let mut last_i = 0; + let mut d = s.drain(); + for (i, x) in d.by_ref().take(50).enumerate() { + last_i = i; + assert!(x != 0); + } + assert_eq!(last_i, 49); + } + + for _ in s.iter() { panic!("s should be empty!"); } + + // reset to try again. 
+ s.extend(range(1, 100)); + } + } } diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs index da06387e9a5eb..8f2152c5a9ded 100644 --- a/src/libstd/collections/hash/table.rs +++ b/src/libstd/collections/hash/table.rs @@ -10,7 +10,7 @@ // // ignore-lexer-test FIXME #15883 -pub use self::BucketState::*; +use self::BucketState::*; use clone::Clone; use cmp; @@ -664,17 +664,17 @@ impl RawTable { } } - pub fn iter_mut(&mut self) -> MutEntries { - MutEntries { + pub fn iter_mut(&mut self) -> IterMut { + IterMut { iter: self.raw_buckets(), elems_left: self.size(), } } - pub fn into_iter(self) -> MoveEntries { + pub fn into_iter(self) -> IntoIter { let RawBuckets { raw, hashes_end, .. } = self.raw_buckets(); // Replace the marker regardless of lifetime bounds on parameters. - MoveEntries { + IntoIter { iter: RawBuckets { raw: raw, hashes_end: hashes_end, @@ -684,6 +684,19 @@ impl RawTable { } } + pub fn drain(&mut self) -> Drain { + let RawBuckets { raw, hashes_end, .. } = self.raw_buckets(); + // Replace the marker regardless of lifetime bounds on parameters. + Drain { + iter: RawBuckets { + raw: raw, + hashes_end: hashes_end, + marker: marker::ContravariantLifetime::<'static>, + }, + table: self, + } + } + /// Returns an iterator that copies out each entry. Used while the table /// is being dropped. unsafe fn rev_move_buckets(&mut self) -> RevMoveBuckets { @@ -763,17 +776,23 @@ pub struct Entries<'a, K: 'a, V: 'a> { } /// Iterator over mutable references to entries in a table. -pub struct MutEntries<'a, K: 'a, V: 'a> { +pub struct IterMut<'a, K: 'a, V: 'a> { iter: RawBuckets<'a, K, V>, elems_left: uint, } /// Iterator over the entries in a table, consuming the table. -pub struct MoveEntries { +pub struct IntoIter { table: RawTable, iter: RawBuckets<'static, K, V> } +/// Iterator over the entries in a table, clearing the table. 
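`RawTable::drain` above returns a `Drain` that borrows the table, and the accompanying `Drop` impl (the `for _ in *self {}` loop a little further on) ensures that entries never yielded are still removed and destroyed when the iterator is dropped early; the new `test_drain` for sets exercises exactly that. The behaviour is the same in today's collections:

```
use std::collections::HashSet;

fn main() {
    let mut s: HashSet<u32> = (1..100).collect();

    {
        // Take only part of the drain...
        let mut d = s.drain();
        let taken: Vec<u32> = d.by_ref().take(50).collect();
        assert_eq!(taken.len(), 50);
        // ...and let `d` drop here with 49 elements still pending.
    }

    // The Drain destructor removed the rest, so the set is now empty.
    assert!(s.is_empty());
}
```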
+pub struct Drain<'a, K: 'a, V: 'a> { + table: &'a mut RawTable, + iter: RawBuckets<'static, K, V>, +} + impl<'a, K, V> Iterator<(&'a K, &'a V)> for Entries<'a, K, V> { fn next(&mut self) -> Option<(&'a K, &'a V)> { self.iter.next().map(|bucket| { @@ -790,7 +809,7 @@ impl<'a, K, V> Iterator<(&'a K, &'a V)> for Entries<'a, K, V> { } } -impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for MutEntries<'a, K, V> { +impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for IterMut<'a, K, V> { fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.iter.next().map(|bucket| { self.elems_left -= 1; @@ -806,7 +825,7 @@ impl<'a, K, V> Iterator<(&'a K, &'a mut V)> for MutEntries<'a, K, V> { } } -impl Iterator<(SafeHash, K, V)> for MoveEntries { +impl Iterator<(SafeHash, K, V)> for IntoIter { fn next(&mut self) -> Option<(SafeHash, K, V)> { self.iter.next().map(|bucket| { self.table.size -= 1; @@ -828,6 +847,36 @@ impl Iterator<(SafeHash, K, V)> for MoveEntries { } } +impl<'a, K: 'a, V: 'a> Iterator<(SafeHash, K, V)> for Drain<'a, K, V> { + #[inline] + fn next(&mut self) -> Option<(SafeHash, K, V)> { + self.iter.next().map(|bucket| { + self.table.size -= 1; + unsafe { + ( + SafeHash { + hash: ptr::replace(bucket.hash, EMPTY_BUCKET), + }, + ptr::read(bucket.key as *const K), + ptr::read(bucket.val as *const V) + ) + } + }) + } + + fn size_hint(&self) -> (uint, Option) { + let size = self.table.size(); + (size, Some(size)) + } +} + +#[unsafe_destructor] +impl<'a, K: 'a, V: 'a> Drop for Drain<'a, K, V> { + fn drop(&mut self) { + for _ in *self {} + } +} + impl Clone for RawTable { fn clone(&self) -> RawTable { unsafe { diff --git a/src/libstd/comm/mod.rs b/src/libstd/comm/mod.rs index 9043cb8c7d6f5..55f5662dbd897 100644 --- a/src/libstd/comm/mod.rs +++ b/src/libstd/comm/mod.rs @@ -628,7 +628,7 @@ impl Sender { } } -#[unstable] +#[stable] impl Clone for Sender { fn clone(&self) -> Sender { let (packet, sleeper, guard) = match *unsafe { self.inner() } { @@ -756,7 +756,7 @@ impl SyncSender { } } -#[unstable] +#[stable] impl Clone for SyncSender { fn clone(&self) -> SyncSender { unsafe { (*self.inner.get()).clone_chan(); } diff --git a/src/libstd/dynamic_lib.rs b/src/libstd/dynamic_lib.rs index 291f384d619d9..368abe7cb1244 100644 --- a/src/libstd/dynamic_lib.rs +++ b/src/libstd/dynamic_lib.rs @@ -15,21 +15,10 @@ #![experimental] #![allow(missing_docs)] -use clone::Clone; -use c_str::ToCStr; -use iter::IteratorExt; +use prelude::*; use mem; -use ops::*; -use option::*; -use option::Option::{None, Some}; use os; -use path::{Path,GenericPath}; -use result::*; -use result::Result::{Err, Ok}; -use slice::{AsSlice,SliceExt}; use str; -use string::String; -use vec::Vec; #[allow(missing_copy_implementations)] pub struct DynamicLibrary { @@ -211,15 +200,12 @@ mod test { target_os = "freebsd", target_os = "dragonfly"))] pub mod dl { - pub use self::Rtld::*; + use self::Rtld::*; - use c_str::{CString, ToCStr}; + use prelude::*; + use c_str::CString; use libc; - use ops::FnOnce; use ptr; - use result::*; - use result::Result::{Err, Ok}; - use string::String; pub unsafe fn open_external(filename: T) -> *mut u8 { filename.with_c_str(|raw_name| { @@ -294,7 +280,7 @@ pub mod dl { use result::Result; use result::Result::{Ok, Err}; use slice::SliceExt; - use str::StrPrelude; + use str::StrExt; use str; use string::String; use vec::Vec; diff --git a/src/libstd/error.rs b/src/libstd/error.rs index 9ad2655f6e9db..cd7d9aacc9010 100644 --- a/src/libstd/error.rs +++ b/src/libstd/error.rs @@ -78,10 +78,9 @@ //! } //! 
``` -use option::Option; -use option::Option::None; -use kinds::Send; -use string::String; +use prelude::*; + +use str::Utf8Error; /// Base functionality for all errors in Rust. pub trait Error: Send { @@ -107,3 +106,14 @@ impl FromError for E { err } } + +impl Error for Utf8Error { + fn description(&self) -> &str { + match *self { + Utf8Error::TooShort => "invalid utf-8: not enough bytes", + Utf8Error::InvalidByte(..) => "invalid utf-8: corrupt contents", + } + } + + fn detail(&self) -> Option { Some(self.to_string()) } +} diff --git a/src/libstd/failure.rs b/src/libstd/failure.rs index 8e1e3dc4af922..7010eae6dba0d 100644 --- a/src/libstd/failure.rs +++ b/src/libstd/failure.rs @@ -41,7 +41,7 @@ pub fn on_fail(obj: &(Any+Send), file: &'static str, line: uint) { let msg = match obj.downcast_ref::<&'static str>() { Some(s) => *s, None => match obj.downcast_ref::() { - Some(s) => s.as_slice(), + Some(s) => s[], None => "Box", } }; diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 25f05940807c1..9d9e882757147 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -409,7 +409,6 @@ mod test { use super::super::{IoResult, EndOfFile}; use super::super::mem::MemReader; use self::test::Bencher; - use str::StrPrelude; /// A type, free to create, primarily intended for benchmarking creation of /// wrappers that, just for construction, don't need a Reader/Writer that diff --git a/src/libstd/io/comm_adapters.rs b/src/libstd/io/comm_adapters.rs index e865bf42bd01d..3a18b0dc1b525 100644 --- a/src/libstd/io/comm_adapters.rs +++ b/src/libstd/io/comm_adapters.rs @@ -132,6 +132,7 @@ impl ChanWriter { } } +#[stable] impl Clone for ChanWriter { fn clone(&self) -> ChanWriter { ChanWriter { tx: self.tx.clone() } diff --git a/src/libstd/io/fs.rs b/src/libstd/io/fs.rs index fd3bae73cd367..4e736908c3720 100644 --- a/src/libstd/io/fs.rs +++ b/src/libstd/io/fs.rs @@ -823,10 +823,6 @@ mod test { use io; use str; use io::fs::*; - use path::Path; - use io; - use ops::Drop; - use str::StrPrelude; macro_rules! check { ($e:expr) => ( match $e { diff --git a/src/libstd/io/mem.rs b/src/libstd/io/mem.rs index 71e8cb4b5ec1d..431e11cf9caca 100644 --- a/src/libstd/io/mem.rs +++ b/src/libstd/io/mem.rs @@ -398,13 +398,12 @@ impl<'a> Buffer for BufReader<'a> { #[cfg(test)] mod test { - extern crate test; + extern crate "test" as test_crate; use prelude::*; use super::*; use io::*; use io; - use self::test::Bencher; - use str::StrPrelude; + use self::test_crate::Bencher; #[test] fn test_vec_writer() { diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index dbf61b132e08b..233ad78109382 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -242,10 +242,11 @@ use result::Result; use result::Result::{Ok, Err}; use sys; use slice::SliceExt; -use str::StrPrelude; +use str::StrExt; use str; use string::String; use uint; +use unicode; use unicode::char::UnicodeChar; use vec::Vec; @@ -1505,7 +1506,7 @@ pub trait Buffer: Reader { /// valid utf-8 encoded codepoint as the next few bytes in the stream. 
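The `impl Error for Utf8Error` above gives the UTF-8 validation error a human-readable `description` plus a `detail` string, so it can flow through `FromError` and `try!` like any other error. The trait has since been reshaped around `Display`; a rough present-day analogue for a custom error enum (the `DecodeError` type is illustrative, not part of the patch) looks like:

```
use std::error::Error;
use std::fmt;

#[derive(Debug)]
enum DecodeError {
    TooShort,
    InvalidByte(usize),
}

impl fmt::Display for DecodeError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            DecodeError::TooShort => write!(f, "invalid utf-8: not enough bytes"),
            DecodeError::InvalidByte(pos) => {
                write!(f, "invalid utf-8: corrupt contents at byte {}", pos)
            }
        }
    }
}

// Display + Debug is all the modern trait requires; the patch's separate
// `description`/`detail` methods were later folded into Display.
impl Error for DecodeError {}

fn main() {
    let e: Box<dyn Error> = Box::new(DecodeError::InvalidByte(3));
    println!("{}", e);
}
```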
fn read_char(&mut self) -> IoResult { let first_byte = try!(self.read_byte()); - let width = str::utf8_char_width(first_byte); + let width = unicode::str::utf8_char_width(first_byte); if width == 1 { return Ok(first_byte as char) } if width == 0 { return Err(standard_error(InvalidInput)) } // not utf8 let mut buf = [first_byte, 0, 0, 0]; @@ -1519,7 +1520,7 @@ pub trait Buffer: Reader { } } } - match str::from_utf8(buf[..width]) { + match str::from_utf8(buf[..width]).ok() { Some(s) => Ok(s.char_at(0)), None => Err(standard_error(InvalidInput)) } diff --git a/src/libstd/io/net/ip.rs b/src/libstd/io/net/ip.rs index 71776b6c46af7..89a649d55bdc4 100644 --- a/src/libstd/io/net/ip.rs +++ b/src/libstd/io/net/ip.rs @@ -25,8 +25,8 @@ use ops::FnOnce; use option::Option; use option::Option::{None, Some}; use result::Result::{Ok, Err}; -use str::{FromStr, StrPrelude}; use slice::{CloneSliceExt, SliceExt}; +use str::{FromStr, StrExt}; use vec::Vec; pub type Port = u16; diff --git a/src/libstd/io/process.rs b/src/libstd/io/process.rs index 9da1117f2272a..4a0a393642439 100644 --- a/src/libstd/io/process.rs +++ b/src/libstd/io/process.rs @@ -1082,7 +1082,7 @@ mod tests { let prog = env_cmd().env_set_all(new_env.as_slice()).spawn().unwrap(); let result = prog.wait_with_output().unwrap(); - let output = String::from_utf8_lossy(result.output.as_slice()).into_string(); + let output = String::from_utf8_lossy(result.output.as_slice()).to_string(); assert!(output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output); @@ -1092,7 +1092,7 @@ mod tests { fn test_add_to_env() { let prog = env_cmd().env("RUN_TEST_NEW_ENV", "123").spawn().unwrap(); let result = prog.wait_with_output().unwrap(); - let output = String::from_utf8_lossy(result.output.as_slice()).into_string(); + let output = String::from_utf8_lossy(result.output.as_slice()).to_string(); assert!(output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output); diff --git a/src/libstd/io/stdio.rs b/src/libstd/io/stdio.rs index aa50597c81698..1c5ceaf24506f 100644 --- a/src/libstd/io/stdio.rs +++ b/src/libstd/io/stdio.rs @@ -43,7 +43,7 @@ use ops::{Deref, DerefMut, FnOnce}; use result::Result::{Ok, Err}; use rt; use slice::SliceExt; -use str::StrPrelude; +use str::StrExt; use string::String; use sys::{fs, tty}; use sync::{Arc, Mutex, MutexGuard, Once, ONCE_INIT}; @@ -104,7 +104,7 @@ pub struct StdinReader { inner: Arc>>, } -/// A guard for exlusive access to `StdinReader`'s internal `BufferedReader`. +/// A guard for exclusive access to `StdinReader`'s internal `BufferedReader`. pub struct StdinReaderGuard<'a> { inner: MutexGuard<'a, BufferedReader>, } diff --git a/src/libstd/io/tempfile.rs b/src/libstd/io/tempfile.rs index f3a119399952d..c2b4d5a1fa982 100644 --- a/src/libstd/io/tempfile.rs +++ b/src/libstd/io/tempfile.rs @@ -23,6 +23,56 @@ use sync::atomic; /// A wrapper for a path to temporary directory implementing automatic /// scope-based deletion. 
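`read_char` above asks `unicode::str::utf8_char_width` how many bytes the first byte promises, reads that many, and then validates them with `from_utf8(...).ok()`. The width is determined entirely by the leading bits of the first byte; a self-contained sketch of the same decoding step in current Rust (the `utf8_width` helper is illustrative, not the libstd function):

```
use std::str;

// Number of bytes in a UTF-8 sequence, judged from its first byte
// (0 means the byte cannot start a sequence).
fn utf8_width(first: u8) -> usize {
    match first {
        0x00..=0x7f => 1,
        0xc2..=0xdf => 2,
        0xe0..=0xef => 3,
        0xf0..=0xf4 => 4,
        _ => 0,
    }
}

fn read_char(buf: &[u8]) -> Option<char> {
    let width = utf8_width(*buf.first()?);
    if width == 0 || buf.len() < width {
        return None;
    }
    // Validate just those bytes and take the single decoded character.
    str::from_utf8(&buf[..width]).ok()?.chars().next()
}

fn main() {
    assert_eq!(read_char("é reste".as_bytes()), Some('é'));
    assert_eq!(read_char(&[0xff, 0x20]), None);
}
```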
+/// +/// # Examples +/// +/// ```no_run +/// use std::io::TempDir; +/// +/// { +/// // create a temporary directory +/// let tmpdir = match TempDir::new("mysuffix") { +/// Ok(dir) => dir, +/// Err(e) => panic!("couldn't create temporary directory: {}", e) +/// }; +/// +/// // get the path of the temporary directory without affecting the wrapper +/// let tmppath = tmpdir.path(); +/// +/// println!("The path of temporary directory is {}", tmppath.display()); +/// +/// // the temporary directory is automatically removed when tmpdir goes +/// // out of scope at the end of the block +/// } +/// { +/// // create a temporary directory, this time using a custom path +/// let tmpdir = match TempDir::new_in(&Path::new("/tmp/best/custom/path"), "mysuffix") { +/// Ok(dir) => dir, +/// Err(e) => panic!("couldn't create temporary directory: {}", e) +/// }; +/// +/// // get the path of the temporary directory and disable automatic deletion in the wrapper +/// let tmppath = tmpdir.into_inner(); +/// +/// println!("The path of the not-so-temporary directory is {}", tmppath.display()); +/// +/// // the temporary directory is not removed here +/// // because the directory is detached from the wrapper +/// } +/// { +/// // create a temporary directory +/// let tmpdir = match TempDir::new("mysuffix") { +/// Ok(dir) => dir, +/// Err(e) => panic!("couldn't create temporary directory: {}", e) +/// }; +/// +/// // close the temporary directory manually and check the result +/// match tmpdir.close() { +/// Ok(_) => println!("success!"), +/// Err(e) => panic!("couldn't remove temporary directory: {}", e) +/// }; +/// } +/// ``` pub struct TempDir { path: Option, disarmed: bool diff --git a/src/libstd/io/timer.rs b/src/libstd/io/timer.rs index 79048c37ab5bb..953effe4345ce 100644 --- a/src/libstd/io/timer.rs +++ b/src/libstd/io/timer.rs @@ -225,11 +225,11 @@ fn in_ms_u64(d: Duration) -> u64 { #[cfg(test)] mod test { - use super::*; - use time::Duration; - use task::spawn; use prelude::*; + use super::Timer; + use time::Duration; + #[test] fn test_io_timer_sleep_simple() { let mut timer = Timer::new().unwrap(); diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index 78c194745a888..8274baeacfad8 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -106,8 +106,7 @@ #![allow(unknown_features)] #![feature(macro_rules, globs, linkage, thread_local, asm)] #![feature(default_type_params, phase, lang_items, unsafe_destructor)] -#![feature(import_shadowing, slicing_syntax, tuple_indexing)] -#![feature(unboxed_closures)] +#![feature(slicing_syntax, unboxed_closures)] // Don't link to std. We are std. #![no_std] @@ -136,7 +135,6 @@ extern crate libc; // NB: These reexports are in the order they should be listed in rustdoc pub use core::any; -pub use core::bool; pub use core::borrow; pub use core::cell; pub use core::clone; @@ -151,14 +149,10 @@ pub use core::mem; pub use core::ptr; pub use core::raw; pub use core::simd; -pub use core::tuple; -// FIXME #15320: primitive documentation needs top-level modules, this -// should be `std::tuple::unit`. 
-pub use core::unit; pub use core::result; pub use core::option; -pub use alloc::boxed; +#[cfg(not(test))] pub use alloc::boxed; pub use alloc::rc; pub use core_collections::slice; @@ -244,6 +238,12 @@ pub mod comm; pub mod rt; mod failure; +// Documentation for primitive types + +mod bool; +mod unit; +mod tuple; + // A curious inner-module that's not exported that contains the binding // 'std' so that macro-expanded references to std::error and such // can be resolved within libstd. diff --git a/src/libstd/num/f32.rs b/src/libstd/num/f32.rs index 60b17de171875..951627b26cad9 100644 --- a/src/libstd/num/f32.rs +++ b/src/libstd/num/f32.rs @@ -21,6 +21,9 @@ use intrinsics; use libc::c_int; use num::{Float, FloatMath}; use num::strconv; +use num::strconv::ExponentFormat::{ExpNone, ExpDec}; +use num::strconv::SignificantDigits::{DigAll, DigMax, DigExact}; +use num::strconv::SignFormat::SignNeg; pub use core::f32::{RADIX, MANTISSA_DIGITS, DIGITS, EPSILON, MIN_VALUE}; pub use core::f32::{MIN_POS_VALUE, MAX_VALUE, MIN_EXP, MAX_EXP, MIN_10_EXP}; @@ -252,7 +255,7 @@ impl FloatMath for f32 { #[experimental = "may be removed or relocated"] pub fn to_string(num: f32) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false); + num, 10u, true, SignNeg, DigAll, ExpNone, false); r } @@ -265,7 +268,7 @@ pub fn to_string(num: f32) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_hex(num: f32) -> String { let (r, _) = strconv::float_to_str_common( - num, 16u, true, strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false); + num, 16u, true, SignNeg, DigAll, ExpNone, false); r } @@ -279,8 +282,7 @@ pub fn to_str_hex(num: f32) -> String { #[inline] #[experimental = "may be removed or relocated"] pub fn to_str_radix_special(num: f32, rdx: uint) -> (String, bool) { - strconv::float_to_str_common(num, rdx, true, - strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false) + strconv::float_to_str_common(num, rdx, true, SignNeg, DigAll, ExpNone, false) } /// Converts a float to a string with exactly the number of @@ -294,7 +296,7 @@ pub fn to_str_radix_special(num: f32, rdx: uint) -> (String, bool) { #[experimental = "may be removed or relocated"] pub fn to_str_exact(num: f32, dig: uint) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigExact(dig), strconv::ExpNone, false); + num, 10u, true, SignNeg, DigExact(dig), ExpNone, false); r } @@ -309,7 +311,7 @@ pub fn to_str_exact(num: f32, dig: uint) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_digits(num: f32, dig: uint) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigMax(dig), strconv::ExpNone, false); + num, 10u, true, SignNeg, DigMax(dig), ExpNone, false); r } @@ -325,7 +327,7 @@ pub fn to_str_digits(num: f32, dig: uint) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_exp_exact(num: f32, dig: uint, upper: bool) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigExact(dig), strconv::ExpDec, upper); + num, 10u, true, SignNeg, DigExact(dig), ExpDec, upper); r } @@ -341,7 +343,7 @@ pub fn to_str_exp_exact(num: f32, dig: uint, upper: bool) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_exp_digits(num: f32, dig: uint, upper: bool) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, 
strconv::DigMax(dig), strconv::ExpDec, upper); + num, 10u, true, SignNeg, DigMax(dig), ExpDec, upper); r } @@ -349,7 +351,6 @@ pub fn to_str_exp_digits(num: f32, dig: uint, upper: bool) -> String { mod tests { use f32::*; use num::*; - use num; #[test] fn test_min_nan() { @@ -364,8 +365,8 @@ mod tests { } #[test] - fn test_num() { - num::test_num(10f32, 2f32); + fn test_num_f32() { + test_num(10f32, 2f32); } #[test] diff --git a/src/libstd/num/f64.rs b/src/libstd/num/f64.rs index 4b31e33236d44..7cc94b9ebbbcb 100644 --- a/src/libstd/num/f64.rs +++ b/src/libstd/num/f64.rs @@ -20,6 +20,9 @@ use intrinsics; use libc::c_int; use num::{Float, FloatMath}; use num::strconv; +use num::strconv::ExponentFormat::{ExpNone, ExpDec}; +use num::strconv::SignificantDigits::{DigAll, DigMax, DigExact}; +use num::strconv::SignFormat::SignNeg; pub use core::f64::{RADIX, MANTISSA_DIGITS, DIGITS, EPSILON, MIN_VALUE}; pub use core::f64::{MIN_POS_VALUE, MAX_VALUE, MIN_EXP, MAX_EXP, MIN_10_EXP}; @@ -260,7 +263,7 @@ impl FloatMath for f64 { #[experimental = "may be removed or relocated"] pub fn to_string(num: f64) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false); + num, 10u, true, SignNeg, DigAll, ExpNone, false); r } @@ -273,7 +276,7 @@ pub fn to_string(num: f64) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_hex(num: f64) -> String { let (r, _) = strconv::float_to_str_common( - num, 16u, true, strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false); + num, 16u, true, SignNeg, DigAll, ExpNone, false); r } @@ -287,8 +290,7 @@ pub fn to_str_hex(num: f64) -> String { #[inline] #[experimental = "may be removed or relocated"] pub fn to_str_radix_special(num: f64, rdx: uint) -> (String, bool) { - strconv::float_to_str_common(num, rdx, true, - strconv::SignNeg, strconv::DigAll, strconv::ExpNone, false) + strconv::float_to_str_common(num, rdx, true, SignNeg, DigAll, ExpNone, false) } /// Converts a float to a string with exactly the number of @@ -302,7 +304,7 @@ pub fn to_str_radix_special(num: f64, rdx: uint) -> (String, bool) { #[experimental = "may be removed or relocated"] pub fn to_str_exact(num: f64, dig: uint) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigExact(dig), strconv::ExpNone, false); + num, 10u, true, SignNeg, DigExact(dig), ExpNone, false); r } @@ -317,7 +319,7 @@ pub fn to_str_exact(num: f64, dig: uint) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_digits(num: f64, dig: uint) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigMax(dig), strconv::ExpNone, false); + num, 10u, true, SignNeg, DigMax(dig), ExpNone, false); r } @@ -333,7 +335,7 @@ pub fn to_str_digits(num: f64, dig: uint) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_exp_exact(num: f64, dig: uint, upper: bool) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigExact(dig), strconv::ExpDec, upper); + num, 10u, true, SignNeg, DigExact(dig), ExpDec, upper); r } @@ -349,7 +351,7 @@ pub fn to_str_exp_exact(num: f64, dig: uint, upper: bool) -> String { #[experimental = "may be removed or relocated"] pub fn to_str_exp_digits(num: f64, dig: uint, upper: bool) -> String { let (r, _) = strconv::float_to_str_common( - num, 10u, true, strconv::SignNeg, strconv::DigMax(dig), strconv::ExpDec, upper); + num, 10u, true, SignNeg, 
DigMax(dig), ExpDec, upper); r } @@ -357,7 +359,6 @@ pub fn to_str_exp_digits(num: f64, dig: uint, upper: bool) -> String { mod tests { use f64::*; use num::*; - use num; #[test] fn test_min_nan() { @@ -372,8 +373,8 @@ mod tests { } #[test] - fn test_num() { - num::test_num(10f64, 2f64); + fn test_num_f64() { + test_num(10f64, 2f64); } #[test] diff --git a/src/libstd/num/int.rs b/src/libstd/num/int.rs index f59dab4b20bde..9ccb1544fdc66 100644 --- a/src/libstd/num/int.rs +++ b/src/libstd/num/int.rs @@ -10,7 +10,7 @@ //! Operations and constants for architecture-sized signed integers (`int` type) -#![unstable] +#![stable] #![doc(primitive = "int")] pub use core::int::{BITS, BYTES, MIN, MAX}; diff --git a/src/libstd/num/strconv.rs b/src/libstd/num/strconv.rs index 016c4bd532a17..d6331f3c718ac 100644 --- a/src/libstd/num/strconv.rs +++ b/src/libstd/num/strconv.rs @@ -12,15 +12,15 @@ #![allow(missing_docs)] -pub use self::ExponentFormat::*; -pub use self::SignificantDigits::*; -pub use self::SignFormat::*; +use self::ExponentFormat::*; +use self::SignificantDigits::*; +use self::SignFormat::*; use char::{mod, Char}; use num::{mod, Int, Float, FPNaN, FPInfinite, ToPrimitive}; use ops::FnMut; use slice::{SliceExt, CloneSliceExt}; -use str::StrPrelude; +use str::StrExt; use string::String; use vec::Vec; diff --git a/src/libstd/num/uint.rs b/src/libstd/num/uint.rs index 7f8edee571fb8..cd000b3098bff 100644 --- a/src/libstd/num/uint.rs +++ b/src/libstd/num/uint.rs @@ -10,7 +10,7 @@ //! Operations and constants for architecture-sized unsigned integers (`uint` type) -#![unstable] +#![stable] #![doc(primitive = "uint")] pub use core::uint::{BITS, BYTES, MIN, MAX}; diff --git a/src/libstd/os.rs b/src/libstd/os.rs index dcc73f7844a49..ceb9a4102f635 100644 --- a/src/libstd/os.rs +++ b/src/libstd/os.rs @@ -28,9 +28,9 @@ #![allow(non_snake_case)] #![allow(unused_imports)] -pub use self::MemoryMapKind::*; -pub use self::MapOption::*; -pub use self::MapError::*; +use self::MemoryMapKind::*; +use self::MapOption::*; +use self::MapError::*; use clone::Clone; use error::{FromError, Error}; @@ -52,7 +52,7 @@ use result::Result; use result::Result::{Err, Ok}; use slice::{AsSlice, SliceExt}; use slice::CloneSliceExt; -use str::{Str, StrPrelude, StrAllocating}; +use str::{Str, StrExt}; use string::{String, ToString}; use sync::atomic::{AtomicInt, INIT_ATOMIC_INT, SeqCst}; use vec::Vec; @@ -134,8 +134,8 @@ fn with_env_lock(f: F) -> T where /// ``` pub fn env() -> Vec<(String,String)> { env_as_bytes().into_iter().map(|(k,v)| { - let k = String::from_utf8_lossy(k.as_slice()).into_string(); - let v = String::from_utf8_lossy(v.as_slice()).into_string(); + let k = String::from_utf8_lossy(k.as_slice()).into_owned(); + let v = String::from_utf8_lossy(v.as_slice()).into_owned(); (k,v) }).collect() } @@ -185,7 +185,7 @@ pub fn env_as_bytes() -> Vec<(Vec,Vec)> { /// } /// ``` pub fn getenv(n: &str) -> Option { - getenv_as_bytes(n).map(|v| String::from_utf8_lossy(v.as_slice()).into_string()) + getenv_as_bytes(n).map(|v| String::from_utf8_lossy(v.as_slice()).into_owned()) } #[cfg(unix)] @@ -707,7 +707,7 @@ fn real_args_as_bytes() -> Vec> { fn real_args() -> Vec { real_args_as_bytes().into_iter() .map(|v| { - String::from_utf8_lossy(v.as_slice()).into_string() + String::from_utf8_lossy(v.as_slice()).into_owned() }).collect() } @@ -729,7 +729,7 @@ fn real_args() -> Vec { // Push it onto the list. 
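The strconv-related hunks above (`use num::strconv::SignFormat::SignNeg;` in f32/f64, and `pub use self::SignFormat::*;` becoming a private `use` in strconv itself) reflect enum variants now being namespaced under their enum: callers either import a variant explicitly or spell it through the enum's path instead of relying on glob re-exports, and the os.rs tests a little further on make the same switch to `MapOption::MapReadable`. A small sketch in current Rust:

```
mod strconv {
    // Variants are scoped to the enum; nothing is glob re-exported any more.
    pub enum SignFormat {
        SignNone,
        SignNeg,
    }
}

// Import the enum and one variant explicitly instead of a glob re-export.
use self::strconv::SignFormat::{self, SignNeg};

fn describe(fmt: SignFormat) -> &'static str {
    match fmt {
        // An imported variant can be used bare...
        SignNeg => "print a leading minus for negative numbers",
        // ...otherwise it is spelled through the enum's path.
        SignFormat::SignNone => "never print a sign",
    }
}

fn main() {
    println!("{}", describe(SignNeg));
    println!("{}", describe(SignFormat::SignNone));
}
```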
let ptr = ptr as *const u16; let buf = slice::from_raw_buf(&ptr, len); - let opt_s = String::from_utf16(::str::truncate_utf16_at_nul(buf)); + let opt_s = String::from_utf16(sys::os::truncate_utf16_at_nul(buf)); opt_s.expect("CommandLineToArgvW returned invalid UTF-16") }); @@ -1425,7 +1425,6 @@ mod arch_consts { #[cfg(test)] mod tests { use prelude::*; - use c_str::ToCStr; use option; use os::{env, getcwd, getenv, make_absolute}; use os::{split_paths, join_paths, setenv, unsetenv}; @@ -1618,8 +1617,8 @@ mod tests { use result::Result::{Ok, Err}; let chunk = match os::MemoryMap::new(16, &[ - os::MapReadable, - os::MapWritable + os::MapOption::MapReadable, + os::MapOption::MapWritable ]) { Ok(chunk) => chunk, Err(msg) => panic!("{}", msg) @@ -1661,10 +1660,10 @@ mod tests { file.write_u8(0); let chunk = MemoryMap::new(size / 2, &[ - MapReadable, - MapWritable, - MapFd(get_fd(&file)), - MapOffset(size / 2) + MapOption::MapReadable, + MapOption::MapWritable, + MapOption::MapFd(get_fd(&file)), + MapOption::MapOffset(size / 2) ]).unwrap(); assert!(chunk.len > 0); diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs index 8f98329a4be72..30f3f56bc1c10 100644 --- a/src/libstd/path/mod.rs +++ b/src/libstd/path/mod.rs @@ -69,7 +69,7 @@ use iter::IteratorExt; use option::Option; use option::Option::{None, Some}; use str; -use str::{CowString, MaybeOwned, Str, StrPrelude}; +use str::{CowString, MaybeOwned, Str, StrExt}; use string::String; use slice::{AsSlice, CloneSliceExt}; use slice::{PartialEqSliceExt, SliceExt}; @@ -197,7 +197,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { /// ``` #[inline] fn as_str<'a>(&'a self) -> Option<&'a str> { - str::from_utf8(self.as_vec()) + str::from_utf8(self.as_vec()).ok() } /// Returns the path as a byte vector @@ -293,7 +293,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { /// ``` #[inline] fn dirname_str<'a>(&'a self) -> Option<&'a str> { - str::from_utf8(self.dirname()) + str::from_utf8(self.dirname()).ok() } /// Returns the file component of `self`, as a byte vector. @@ -327,7 +327,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { /// ``` #[inline] fn filename_str<'a>(&'a self) -> Option<&'a str> { - self.filename().and_then(str::from_utf8) + self.filename().and_then(|s| str::from_utf8(s).ok()) } /// Returns the stem of the filename of `self`, as a byte vector. @@ -373,7 +373,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { /// ``` #[inline] fn filestem_str<'a>(&'a self) -> Option<&'a str> { - self.filestem().and_then(str::from_utf8) + self.filestem().and_then(|s| str::from_utf8(s).ok()) } /// Returns the extension of the filename of `self`, as an optional byte vector. @@ -420,7 +420,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { /// ``` #[inline] fn extension_str<'a>(&'a self) -> Option<&'a str> { - self.extension().and_then(str::from_utf8) + self.extension().and_then(|s| str::from_utf8(s).ok()) } /// Replaces the filename portion of the path with the given byte vector or string. @@ -793,7 +793,7 @@ pub trait BytesContainer for Sized? 
{ /// Returns the receiver interpreted as a utf-8 string, if possible #[inline] fn container_as_str<'a>(&'a self) -> Option<&'a str> { - str::from_utf8(self.container_as_bytes()) + str::from_utf8(self.container_as_bytes()).ok() } /// Returns whether .container_as_str() is guaranteed to not fail // FIXME (#8888): Remove unused arg once :: works @@ -870,7 +870,7 @@ impl BytesContainer for String { } #[inline] fn container_as_str(&self) -> Option<&str> { - Some(self.as_slice()) + Some(self[]) } #[inline] fn is_str(_: Option<&String>) -> bool { true } @@ -886,7 +886,7 @@ impl BytesContainer for [u8] { impl BytesContainer for Vec { #[inline] fn container_as_bytes(&self) -> &[u8] { - self.as_slice() + self[] } } @@ -897,6 +897,7 @@ impl BytesContainer for CString { } } +#[allow(deprecated)] impl<'a> BytesContainer for str::MaybeOwned<'a> { #[inline] fn container_as_bytes<'b>(&'b self) -> &'b [u8] { @@ -931,8 +932,6 @@ fn contains_nul(v: &T) -> bool { #[cfg(test)] mod tests { use prelude::*; - use super::{GenericPath, PosixPath, WindowsPath}; - use c_str::ToCStr; #[test] fn test_cstring() { diff --git a/src/libstd/path/posix.rs b/src/libstd/path/posix.rs index 62f64159c047a..f0a00b421c3a3 100644 --- a/src/libstd/path/posix.rs +++ b/src/libstd/path/posix.rs @@ -390,6 +390,7 @@ impl Path { let v = if self.repr[0] == SEP_BYTE { self.repr[1..] } else { self.repr.as_slice() }; + let is_sep_byte: fn(&u8) -> bool = is_sep_byte; // coerce to fn ptr let mut ret = v.split(is_sep_byte); if v.is_empty() { // consume the empty "" component @@ -401,7 +402,11 @@ impl Path { /// Returns an iterator that yields each component of the path as Option<&str>. /// See components() for details. pub fn str_components<'a>(&'a self) -> StrComponents<'a> { - self.components().map(str::from_utf8) + fn from_utf8(s: &[u8]) -> Option<&str> { + str::from_utf8(s).ok() + } + let f: fn(&[u8]) -> Option<&str> = from_utf8; // coerce to fn ptr + self.components().map(f) } } @@ -445,7 +450,6 @@ mod tests { use prelude::*; use super::*; use str; - use str::StrPrelude; macro_rules! t { (s: $path:expr, $exp:expr) => ( diff --git a/src/libstd/path/windows.rs b/src/libstd/path/windows.rs index b498b3e8ad083..7d10188c437e2 100644 --- a/src/libstd/path/windows.rs +++ b/src/libstd/path/windows.rs @@ -12,7 +12,7 @@ //! Windows file path handling -pub use self::PathPrefix::*; +use self::PathPrefix::*; use ascii::AsciiCast; use c_str::{CString, ToCStr}; @@ -25,9 +25,9 @@ use iter::{Iterator, IteratorExt, Map}; use mem; use option::Option; use option::Option::{Some, None}; -use slice::{AsSlice, SliceExt}; -use str::{CharSplits, FromStr, Str, StrAllocating, StrVector, StrPrelude}; -use string::String; +use slice::SliceExt; +use str::{CharSplits, FromStr, StrVector, StrExt}; +use string::{String, ToString}; use unicode::char::UnicodeChar; use vec::Vec; @@ -187,30 +187,30 @@ impl GenericPathUnsafe for Path { s.push_str(".."); s.push(SEP); s.push_str(filename); - self.update_normalized(s); + self.update_normalized(s[]); } None => { self.update_normalized(filename); } - Some((_,idxa,end)) if self.repr.slice(idxa,end) == ".." => { + Some((_,idxa,end)) if self.repr[idxa..end] == ".." 
=> { let mut s = String::with_capacity(end + 1 + filename.len()); - s.push_str(self.repr.slice_to(end)); + s.push_str(self.repr[0..end]); s.push(SEP); s.push_str(filename); - self.update_normalized(s); + self.update_normalized(s[]); } Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => { let mut s = String::with_capacity(idxb + filename.len()); - s.push_str(self.repr.slice_to(idxb)); + s.push_str(self.repr[0..idxb]); s.push_str(filename); - self.update_normalized(s); + self.update_normalized(s[]); } Some((idxb,_,_)) => { let mut s = String::with_capacity(idxb + 1 + filename.len()); - s.push_str(self.repr.slice_to(idxb)); + s.push_str(self.repr[0..idxb]); s.push(SEP); s.push_str(filename); - self.update_normalized(s); + self.update_normalized(s[]); } } } @@ -229,12 +229,12 @@ impl GenericPathUnsafe for Path { let path = path.container_as_str().unwrap(); fn is_vol_abs(path: &str, prefix: Option) -> bool { // assume prefix is Some(DiskPrefix) - let rest = path.slice_from(prefix_len(prefix)); + let rest = path[prefix_len(prefix)..]; !rest.is_empty() && rest.as_bytes()[0].is_ascii() && is_sep(rest.as_bytes()[0] as char) } fn shares_volume(me: &Path, path: &str) -> bool { // path is assumed to have a prefix of Some(DiskPrefix) - let repr = me.repr.as_slice(); + let repr = me.repr[]; match me.prefix { Some(DiskPrefix) => { repr.as_bytes()[0] == path.as_bytes()[0].to_ascii().to_uppercase().as_byte() @@ -266,7 +266,7 @@ impl GenericPathUnsafe for Path { else { None }; let pathlen = path_.as_ref().map_or(path.len(), |p| p.len()); let mut s = String::with_capacity(me.repr.len() + 1 + pathlen); - s.push_str(me.repr.as_slice()); + s.push_str(me.repr[]); let plen = me.prefix_len(); // if me is "C:" we don't want to add a path separator match me.prefix { @@ -278,9 +278,9 @@ impl GenericPathUnsafe for Path { } match path_ { None => s.push_str(path), - Some(p) => s.push_str(p.as_slice()) + Some(p) => s.push_str(p[]), }; - me.update_normalized(s) + me.update_normalized(s[]) } if !path.is_empty() { @@ -288,7 +288,7 @@ impl GenericPathUnsafe for Path { match prefix { Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => { // cwd-relative path, self is on the same volume - append_path(self, path.slice_from(prefix_len(prefix))); + append_path(self, path[prefix_len(prefix)..]); } Some(_) => { // absolute path, or cwd-relative and self is not same volume @@ -334,7 +334,7 @@ impl GenericPath for Path { /// Always returns a `Some` value. #[inline] fn as_str<'a>(&'a self) -> Option<&'a str> { - Some(self.repr.as_slice()) + Some(self.repr[]) } #[inline] @@ -356,21 +356,17 @@ impl GenericPath for Path { /// Always returns a `Some` value. fn dirname_str<'a>(&'a self) -> Option<&'a str> { Some(match self.sepidx_or_prefix_len() { - None if ".." == self.repr => self.repr.as_slice(), + None if ".." == self.repr => self.repr[], None => ".", - Some((_,idxa,end)) if self.repr.slice(idxa, end) == ".." => { - self.repr.as_slice() - } - Some((idxb,_,end)) if self.repr.slice(idxb, end) == "\\" => { - self.repr.as_slice() - } - Some((0,idxa,_)) => self.repr.slice_to(idxa), + Some((_,idxa,end)) if self.repr[idxa..end] == ".." 
=> self.repr[], + Some((idxb,_,end)) if self.repr[idxb..end] == "\\" => self.repr[], + Some((0,idxa,_)) => self.repr[0..idxa], Some((idxb,idxa,_)) => { match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => { - self.repr.slice_to(idxa) + self.repr[0..idxa] } - _ => self.repr.slice_to(idxb) + _ => self.repr[0..idxb] } } }) @@ -384,13 +380,13 @@ impl GenericPath for Path { /// See `GenericPath::filename_str` for info. /// Always returns a `Some` value if `filename` returns a `Some` value. fn filename_str<'a>(&'a self) -> Option<&'a str> { - let repr = self.repr.as_slice(); + let repr = self.repr[]; match self.sepidx_or_prefix_len() { None if "." == repr || ".." == repr => None, None => Some(repr), - Some((_,idxa,end)) if repr.slice(idxa, end) == ".." => None, + Some((_,idxa,end)) if repr[idxa..end] == ".." => None, Some((_,idxa,end)) if idxa == end => None, - Some((_,idxa,end)) => Some(repr.slice(idxa, end)) + Some((_,idxa,end)) => Some(repr[idxa..end]) } } @@ -422,7 +418,7 @@ impl GenericPath for Path { true } Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false, - Some((idxb,_,end)) if self.repr.slice(idxb, end) == "\\" => false, + Some((idxb,_,end)) if self.repr[idxb..end] == "\\" => false, Some((idxb,idxa,_)) => { let trunc = match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => { @@ -442,15 +438,15 @@ impl GenericPath for Path { if self.prefix.is_some() { Some(Path::new(match self.prefix { Some(DiskPrefix) if self.is_absolute() => { - self.repr.slice_to(self.prefix_len()+1) + self.repr[0..self.prefix_len()+1] } Some(VerbatimDiskPrefix) => { - self.repr.slice_to(self.prefix_len()+1) + self.repr[0..self.prefix_len()+1] } - _ => self.repr.slice_to(self.prefix_len()) + _ => self.repr[0..self.prefix_len()] })) } else if is_vol_relative(self) { - Some(Path::new(self.repr.slice_to(1))) + Some(Path::new(self.repr[0..1])) } else { None } @@ -469,7 +465,7 @@ impl GenericPath for Path { fn is_absolute(&self) -> bool { match self.prefix { Some(DiskPrefix) => { - let rest = self.repr.slice_from(self.prefix_len()); + let rest = self.repr[self.prefix_len()..]; rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE } Some(_) => true, @@ -644,18 +640,19 @@ impl Path { /// Does not distinguish between absolute and cwd-relative paths, e.g. /// C:\foo and C:foo. pub fn str_components<'a>(&'a self) -> StrComponents<'a> { - let repr = self.repr.as_slice(); + let repr = self.repr[]; let s = match self.prefix { Some(_) => { let plen = self.prefix_len(); if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE { - repr.slice_from(plen+1) - } else { repr.slice_from(plen) } + repr[plen+1..] + } else { repr[plen..] 
} } - None if repr.as_bytes()[0] == SEP_BYTE => repr.slice_from(1), + None if repr.as_bytes()[0] == SEP_BYTE => repr[1..], None => repr }; - let ret = s.split_terminator(SEP).map(Some); + let some: fn(&'a str) -> Option<&'a str> = Some; // coerce to fn ptr + let ret = s.split_terminator(SEP).map(some); ret } @@ -666,12 +663,13 @@ impl Path { #![inline] x.unwrap().as_bytes() } + let convert: for<'b> fn(Option<&'b str>) -> &'b [u8] = convert; // coerce to fn ptr self.str_components().map(convert) } fn equiv_prefix(&self, other: &Path) -> bool { - let s_repr = self.repr.as_slice(); - let o_repr = other.repr.as_slice(); + let s_repr = self.repr[]; + let o_repr = other.repr[]; match (self.prefix, other.prefix) { (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => { self.is_absolute() && @@ -688,28 +686,28 @@ impl Path { o_repr.as_bytes()[4].to_ascii().to_lowercase() } (Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => { - s_repr.slice(2, self.prefix_len()) == o_repr.slice(8, other.prefix_len()) + s_repr[2..self.prefix_len()] == o_repr[8..other.prefix_len()] } (Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => { - s_repr.slice(8, self.prefix_len()) == o_repr.slice(2, other.prefix_len()) + s_repr[8..self.prefix_len()] == o_repr[2..other.prefix_len()] } (None, None) => true, (a, b) if a == b => { - s_repr.slice_to(self.prefix_len()) == o_repr.slice_to(other.prefix_len()) + s_repr[0..self.prefix_len()] == o_repr[0..other.prefix_len()] } _ => false } } - fn normalize_(s: S) -> (Option, String) { + fn normalize_(s: &str) -> (Option, String) { // make borrowck happy let (prefix, val) = { - let prefix = parse_prefix(s.as_slice()); - let path = Path::normalize__(s.as_slice(), prefix); + let prefix = parse_prefix(s); + let path = Path::normalize__(s, prefix); (prefix, path) }; (prefix, match val { - None => s.into_string(), + None => s.to_string(), Some(val) => val }) } @@ -749,7 +747,7 @@ impl Path { match prefix.unwrap() { DiskPrefix => { let len = prefix_len(prefix) + is_abs as uint; - let mut s = String::from_str(s.slice_to(len)); + let mut s = String::from_str(s[0..len]); unsafe { let v = s.as_mut_vec(); v[0] = (*v)[0].to_ascii().to_uppercase().as_byte(); @@ -764,7 +762,7 @@ impl Path { } VerbatimDiskPrefix => { let len = prefix_len(prefix) + is_abs as uint; - let mut s = String::from_str(s.slice_to(len)); + let mut s = String::from_str(s[0..len]); unsafe { let v = s.as_mut_vec(); v[4] = (*v)[4].to_ascii().to_uppercase().as_byte(); @@ -774,14 +772,14 @@ impl Path { _ => { let plen = prefix_len(prefix); if s.len() > plen { - Some(String::from_str(s.slice_to(plen))) + Some(String::from_str(s[0..plen])) } else { None } } } } else if is_abs && comps.is_empty() { Some(String::from_char(1, SEP)) } else { - let prefix_ = s.slice_to(prefix_len(prefix)); + let prefix_ = s[0..prefix_len(prefix)]; let n = prefix_.len() + if is_abs { comps.len() } else { comps.len() - 1} + comps.iter().map(|v| v.len()).sum(); @@ -793,16 +791,16 @@ impl Path { s.push(':'); } Some(VerbatimDiskPrefix) => { - s.push_str(prefix_.slice_to(4)); + s.push_str(prefix_[0..4]); s.push(prefix_.as_bytes()[4].to_ascii() .to_uppercase().as_char()); - s.push_str(prefix_.slice_from(5)); + s.push_str(prefix_[5..]); } Some(UNCPrefix(a,b)) => { s.push_str("\\\\"); - s.push_str(prefix_.slice(2, a+2)); + s.push_str(prefix_[2..a+2]); s.push(SEP); - s.push_str(prefix_.slice(3+a, 3+a+b)); + s.push_str(prefix_[3+a..3+a+b]); } Some(_) => s.push_str(prefix_), None => () @@ -827,10 +825,14 @@ impl Path { fn update_sepidx(&mut self) { let s = if 
self.has_nonsemantic_trailing_slash() { - self.repr.slice_to(self.repr.len()-1) - } else { self.repr.as_slice() }; - let idx = s.rfind(if !prefix_is_verbatim(self.prefix) { is_sep } - else { is_sep_verbatim }); + self.repr[0..self.repr.len()-1] + } else { self.repr[] }; + let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) { + is_sep + } else { + is_sep_verbatim + }; + let idx = s.rfind(sep_test); let prefixlen = self.prefix_len(); self.sepidx = idx.and_then(|x| if x < prefixlen { None } else { Some(x) }); } @@ -860,8 +862,8 @@ impl Path { self.repr.as_bytes()[self.repr.len()-1] == SEP_BYTE } - fn update_normalized(&mut self, s: S) { - let (prefix, path) = Path::normalize_(s.as_slice()); + fn update_normalized(&mut self, s: &str) { + let (prefix, path) = Path::normalize_(s); self.repr = path; self.prefix = prefix; self.update_sepidx(); @@ -903,17 +905,17 @@ pub fn is_verbatim(path: &Path) -> bool { /// non-verbatim, the non-verbatim version is returned. /// Otherwise, None is returned. pub fn make_non_verbatim(path: &Path) -> Option { - let repr = path.repr.as_slice(); + let repr = path.repr[]; let new_path = match path.prefix { Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None, Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()), Some(VerbatimDiskPrefix) => { // \\?\D:\ - Path::new(repr.slice_from(4)) + Path::new(repr[4..]) } Some(VerbatimUNCPrefix(_,_)) => { // \\?\UNC\server\share - Path::new(format!(r"\{}", repr.slice_from(7))) + Path::new(format!(r"\{}", repr[7..])) } }; if new_path.prefix.is_none() { @@ -922,8 +924,8 @@ pub fn make_non_verbatim(path: &Path) -> Option { return None; } // now ensure normalization didn't change anything - if repr.slice_from(path.prefix_len()) == - new_path.repr.slice_from(new_path.prefix_len()) { + if repr[path.prefix_len()..] == + new_path.repr[new_path.prefix_len()..] { Some(new_path) } else { None @@ -988,13 +990,13 @@ pub enum PathPrefix { fn parse_prefix<'a>(mut path: &'a str) -> Option { if path.starts_with("\\\\") { // \\ - path = path.slice_from(2); + path = path[2..]; if path.starts_with("?\\") { // \\?\ - path = path.slice_from(2); + path = path[2..]; if path.starts_with("UNC\\") { // \\?\UNC\server\share - path = path.slice_from(4); + path = path[4..]; let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) { Some(x) => x, None => (path.len(), 0) @@ -1015,7 +1017,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option { } } else if path.starts_with(".\\") { // \\.\path - path = path.slice_from(2); + path = path[2..]; let idx = path.find('\\').unwrap_or(path.len()); return Some(DeviceNSPrefix(idx)); } @@ -1040,7 +1042,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option { None => return None, Some(x) => x }; - path = path.slice_from(idx_a+1); + path = path[idx_a+1..]; let idx_b = path.find(f).unwrap_or(path.len()); Some((idx_a, idx_b)) } @@ -1048,10 +1050,14 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option { // None result means the string didn't need normalizing fn normalize_helper<'a>(s: &'a str, prefix: Option) -> (bool, Option>) { - let f = if !prefix_is_verbatim(prefix) { is_sep } else { is_sep_verbatim }; + let f: fn(char) -> bool = if !prefix_is_verbatim(prefix) { + is_sep + } else { + is_sep_verbatim + }; let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix))); - let s_ = s.slice_from(prefix_len(prefix)); - let s_ = if is_abs { s_.slice_from(1) } else { s_ }; + let s_ = s[prefix_len(prefix)..]; + let s_ = if is_abs { s_[1..] 
} else { s_ }; if is_abs && s_.is_empty() { return (is_abs, match prefix { @@ -1117,6 +1123,7 @@ fn prefix_len(p: Option) -> uint { mod tests { use prelude::*; use super::*; + use super::PathPrefix::*; use super::parse_prefix; macro_rules! t { diff --git a/src/libstd/prelude.rs b/src/libstd/prelude.rs index 8b6575b6bc1cc..49b888d17f47f 100644 --- a/src/libstd/prelude.rs +++ b/src/libstd/prelude.rs @@ -79,11 +79,11 @@ #[doc(no_inline)] pub use result::Result; #[doc(no_inline)] pub use result::Result::{Ok, Err}; #[doc(no_inline)] pub use io::{Buffer, Writer, Reader, Seek, BufferPrelude}; -#[doc(no_inline)] pub use str::{Str, StrVector, StrPrelude}; -#[doc(no_inline)] pub use str::{StrAllocating, UnicodeStrPrelude}; -#[doc(no_inline)] pub use tuple::{Tuple1, Tuple2, Tuple3, Tuple4}; -#[doc(no_inline)] pub use tuple::{Tuple5, Tuple6, Tuple7, Tuple8}; -#[doc(no_inline)] pub use tuple::{Tuple9, Tuple10, Tuple11, Tuple12}; +#[doc(no_inline)] pub use core::prelude::{Tuple1, Tuple2, Tuple3, Tuple4}; +#[doc(no_inline)] pub use core::prelude::{Tuple5, Tuple6, Tuple7, Tuple8}; +#[doc(no_inline)] pub use core::prelude::{Tuple9, Tuple10, Tuple11, Tuple12}; +#[doc(no_inline)] pub use str::{Str, StrVector}; +#[doc(no_inline)] pub use str::StrExt; #[doc(no_inline)] pub use slice::AsSlice; #[doc(no_inline)] pub use slice::{VectorVector, PartialEqSliceExt}; #[doc(no_inline)] pub use slice::{CloneSliceExt, OrdSliceExt, SliceExt}; diff --git a/src/libstd/rand/mod.rs b/src/libstd/rand/mod.rs index 0035e5747aa69..c590c0f575ee6 100644 --- a/src/libstd/rand/mod.rs +++ b/src/libstd/rand/mod.rs @@ -45,7 +45,7 @@ //! so the "quality" of `/dev/random` is not better than `/dev/urandom` in most cases. //! However, this means that `/dev/urandom` can yield somewhat predictable randomness //! if the entropy pool is very small, such as immediately after first booting. -//! Linux 3,17 added `getrandom(2)` system call which solves the issue: it blocks if entropy +//! Linux 3.17 added the `getrandom(2)` system call which solves the issue: it blocks if entropy //! pool is not initialized yet, but it does not block once initialized. //! `OsRng` tries to use `getrandom(2)` if available, and use `/dev/urandom` fallback if not. //! If an application does not have `getrandom` and likely to be run soon after first booting, @@ -126,7 +126,7 @@ //! > Is it to your advantage to switch your choice? //! //! The rather unintuitive answer is that you will have a 2/3 chance of winning if -//! you switch and a 1/3 chance of winning of you don't, so it's better to switch. +//! you switch and a 1/3 chance of winning if you don't, so it's better to switch. //! //! This program will simulate the game show and with large enough simulation steps //! it will indeed confirm that it is better to switch. 
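(Editorial aside, not part of this patch.) The `std::rand` doc comment quoted above asserts the 2/3-versus-1/3 split for the Monty Hall game before pointing at a simulation. A minimal sketch in present-day Rust syntax that checks the claim by enumerating the three equally likely car positions, rather than sampling with any particular RNG API, looks like this:

```rust
fn main() {
    let doors = [0u32, 1, 2];
    let pick = 0u32; // the player's initial choice; by symmetry any door works
    let mut switch_wins = 0;
    let mut stay_wins = 0;

    for &car in doors.iter() {
        // The host opens a door that is neither the player's pick nor the car.
        // (When pick == car the host has two options; either one gives the same tally.)
        let opened = doors.iter().copied()
            .find(|&d| d != pick && d != car)
            .unwrap();
        // Switching means taking the one remaining unopened door.
        let switched = doors.iter().copied()
            .find(|&d| d != pick && d != opened)
            .unwrap();

        if switched == car { switch_wins += 1; }
        if pick == car { stay_wins += 1; }
    }

    // Prints: switch wins 2/3, stay wins 1/3
    println!("switch wins {}/3, stay wins {}/3", switch_wins, stay_wins);
}
```

Staying wins in exactly one of the three equally likely cases (when the initial pick already holds the car) and switching wins in the other two, which matches the 2/3 figure the doc comment cites.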
diff --git a/src/libstd/rand/os.rs b/src/libstd/rand/os.rs index dbe5f9193298a..68c99b1275855 100644 --- a/src/libstd/rand/os.rs +++ b/src/libstd/rand/os.rs @@ -170,7 +170,6 @@ mod imp { extern crate libc; use io::{IoResult}; - use kinds::marker; use mem; use os; use rand::Rng; diff --git a/src/libstd/rt/args.rs b/src/libstd/rt/args.rs index 3a4705f58b49c..b1f268597c7ad 100644 --- a/src/libstd/rt/args.rs +++ b/src/libstd/rt/args.rs @@ -44,12 +44,10 @@ pub fn clone() -> Option>> { imp::clone() } target_os = "freebsd", target_os = "dragonfly"))] mod imp { - use core::prelude::*; + use prelude::*; - use boxed::Box; - use vec::Vec; - use string::String; use mem; + use slice; use sync::{StaticMutex, MUTEX_INIT}; @@ -98,7 +96,12 @@ mod imp { unsafe fn load_argc_and_argv(argc: int, argv: *const *const u8) -> Vec> { Vec::from_fn(argc as uint, |i| { - String::from_raw_buf(*argv.offset(i as int)).into_bytes() + let arg = *argv.offset(i as int); + let mut len = 0u; + while *arg.offset(len as int) != 0 { + len += 1u; + } + slice::from_raw_buf(&arg, len).to_vec() }) } diff --git a/src/libstd/rt/backtrace.rs b/src/libstd/rt/backtrace.rs index 4a692bccf9eea..775e9bb526f7c 100644 --- a/src/libstd/rt/backtrace.rs +++ b/src/libstd/rt/backtrace.rs @@ -12,7 +12,8 @@ #![allow(non_camel_case_types)] -use option::Option::{Some, None}; +use prelude::*; + use os; use sync::atomic; diff --git a/src/libstd/rt/mod.rs b/src/libstd/rt/mod.rs index 8d9c1268e7e33..d64336569c6e9 100644 --- a/src/libstd/rt/mod.rs +++ b/src/libstd/rt/mod.rs @@ -91,7 +91,7 @@ fn lang_start(main: *const u8, argc: int, argv: *const *const u8) -> int { // created. Note that this isn't necessary in general for new threads, // but we just do this to name the main thread and to give it correct // info about the stack bounds. - let thread: Thread = NewThread::new(Some("
".into_string())); + let thread: Thread = NewThread::new(Some("
".to_string())); thread_info::set((my_stack_bottom, my_stack_top), sys::thread::guard::main(), thread); diff --git a/src/libstd/rt/unwind.rs b/src/libstd/rt/unwind.rs index f572141642cbf..eb15a7ba378e0 100644 --- a/src/libstd/rt/unwind.rs +++ b/src/libstd/rt/unwind.rs @@ -512,7 +512,7 @@ pub fn begin_unwind_fmt(msg: &fmt::Arguments, file_line: &(&'static str, uint)) let mut v = Vec::new(); let _ = write!(&mut VecWriter { v: &mut v }, "{}", msg); - let msg = box String::from_utf8_lossy(v.as_slice()).into_string(); + let msg = box String::from_utf8_lossy(v.as_slice()).into_owned(); begin_unwind_inner(msg, file_line) } diff --git a/src/libstd/rt/util.rs b/src/libstd/rt/util.rs index fa527a70f8395..d8cd8455deb5c 100644 --- a/src/libstd/rt/util.rs +++ b/src/libstd/rt/util.rs @@ -10,17 +10,16 @@ // // ignore-lexer-test FIXME #15677 -use core::prelude::*; +use prelude::*; -use core::cmp; -use core::fmt; -use core::intrinsics; -use core::slice; -use core::str; - -use libc::{mod, uintptr_t}; +use cmp; +use fmt; +use intrinsics; +use libc::uintptr_t; +use libc; use os; -use str::{FromStr, from_str, Str}; +use slice; +use str; use sync::atomic; /// Dynamically inquire about whether we're running under V. @@ -53,7 +52,7 @@ pub fn min_stack() -> uint { 0 => {} n => return n - 1, } - let amt = os::getenv("RUST_MIN_STACK").and_then(|s| from_str(s.as_slice())); + let amt = os::getenv("RUST_MIN_STACK").and_then(|s| s.parse()); let amt = amt.unwrap_or(2 * 1024 * 1024); // 0 is our sentinel value, so ensure that we'll never see 0 after // initialization has run @@ -66,7 +65,7 @@ pub fn min_stack() -> uint { pub fn default_sched_threads() -> uint { match os::getenv("RUST_THREADS") { Some(nstr) => { - let opt_n: Option = FromStr::from_str(nstr.as_slice()); + let opt_n: Option = nstr.parse(); match opt_n { Some(n) if n > 0 => n, _ => panic!("`RUST_THREADS` is `{}`, should be a positive integer", nstr) @@ -114,9 +113,8 @@ impl fmt::FormatWriter for Stdio { } pub fn dumb_print(args: &fmt::Arguments) { - use fmt::FormatWriter; let mut w = Stderr; - let _ = w.write_fmt(args); + let _ = write!(&mut w, "{}", args); } pub fn abort(args: &fmt::Arguments) -> ! { diff --git a/src/libstd/sync/future.rs b/src/libstd/sync/future.rs index 5e9d234c6423d..51899a87a325d 100644 --- a/src/libstd/sync/future.rs +++ b/src/libstd/sync/future.rs @@ -153,7 +153,6 @@ mod test { use prelude::*; use sync::Future; use task; - use comm::channel; #[test] fn test_from_value() { diff --git a/src/libstd/sync/task_pool.rs b/src/libstd/sync/task_pool.rs index 793825f1b08f5..366e4b7d35b01 100644 --- a/src/libstd/sync/task_pool.rs +++ b/src/libstd/sync/task_pool.rs @@ -131,10 +131,8 @@ fn spawn_in_pool(jobs: Arc>>) { #[cfg(test)] mod test { - use core::prelude::*; + use prelude::*; use super::*; - use comm::channel; - use iter::range; const TEST_TASKS: uint = 4u; diff --git a/src/libstd/sys/common/backtrace.rs b/src/libstd/sys/common/backtrace.rs index a39c8d6d8fed8..1d646eb06b167 100644 --- a/src/libstd/sys/common/backtrace.rs +++ b/src/libstd/sys/common/backtrace.rs @@ -8,12 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use io::{IoResult, Writer}; -use iter::{Iterator, IteratorExt}; -use option::Option::{Some, None}; -use result::Result::{Ok, Err}; -use str::{StrPrelude, from_str}; -use unicode::char::UnicodeChar; +use prelude::*; + +use io::IoResult; #[cfg(target_word_size = "64")] pub const HEX_WIDTH: uint = 18; #[cfg(target_word_size = "32")] pub const HEX_WIDTH: uint = 10; @@ -85,7 +82,7 @@ pub fn demangle(writer: &mut Writer, s: &str) -> IoResult<()> { while rest.char_at(0).is_numeric() { rest = rest.slice_from(1); } - let i: uint = from_str(inner.slice_to(inner.len() - rest.len())).unwrap(); + let i: uint = inner.slice_to(inner.len() - rest.len()).parse().unwrap(); inner = rest.slice_from(i); rest = rest.slice_to(i); while rest.len() > 0 { diff --git a/src/libstd/sys/common/mutex.rs b/src/libstd/sys/common/mutex.rs index 117d33db32896..1a8a92a105a77 100644 --- a/src/libstd/sys/common/mutex.rs +++ b/src/libstd/sys/common/mutex.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -pub use sys::mutex::raw; - use sys::mutex as imp; /// An OS-based mutual exclusion lock. diff --git a/src/libstd/sys/common/net.rs b/src/libstd/sys/common/net.rs index 73da200e16238..382f6875b281d 100644 --- a/src/libstd/sys/common/net.rs +++ b/src/libstd/sys/common/net.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -pub use self::SocketStatus::*; -pub use self::InAddr::*; +use self::SocketStatus::*; +use self::InAddr::*; use alloc::arc::Arc; use libc::{mod, c_char, c_int}; diff --git a/src/libstd/sys/unix/backtrace.rs b/src/libstd/sys/unix/backtrace.rs index df9dbad2ec787..983d0e5fa1486 100644 --- a/src/libstd/sys/unix/backtrace.rs +++ b/src/libstd/sys/unix/backtrace.rs @@ -105,7 +105,7 @@ use sys_common::backtrace::*; #[cfg(all(target_os = "ios", target_arch = "arm"))] #[inline(never)] pub fn write(w: &mut Writer) -> IoResult<()> { - use iter::{Iterator, range}; + use iter::{IteratorExt, range}; use result; use slice::SliceExt; @@ -117,7 +117,7 @@ pub fn write(w: &mut Writer) -> IoResult<()> { // while it doesn't requires lock for work as everything is // local, it still displays much nicer backtraces when a // couple of tasks panic simultaneously - static LOCK: StaticNativeMutex = NATIVE_MUTEX_INIT; + static LOCK: StaticMutex = MUTEX_INIT; let _g = unsafe { LOCK.lock() }; try!(writeln!(w, "stack backtrace:")); diff --git a/src/libstd/sys/unix/fs.rs b/src/libstd/sys/unix/fs.rs index 66f7d85f20dfb..98d860f964649 100644 --- a/src/libstd/sys/unix/fs.rs +++ b/src/libstd/sys/unix/fs.rs @@ -18,14 +18,11 @@ use io; use prelude::*; use io::{FilePermission, Write, UnstableFileStat, Open, FileAccess, FileMode}; -use io::{IoResult, FileStat, SeekStyle, Reader}; +use io::{IoResult, FileStat, SeekStyle}; use io::{Read, Truncate, SeekCur, SeekSet, ReadWrite, SeekEnd, Append}; -use result::Result::{Ok, Err}; use sys::retry; use sys_common::{keep_going, eof, mkerr_libc}; -pub use path::PosixPath as Path; - pub type fd_t = libc::c_int; pub struct FileDesc { diff --git a/src/libstd/sys/unix/os.rs b/src/libstd/sys/unix/os.rs index 0ed079df55b35..316d97064eeca 100644 --- a/src/libstd/sys/unix/os.rs +++ b/src/libstd/sys/unix/os.rs @@ -16,8 +16,8 @@ use error::{FromError, Error}; use fmt; use io::{IoError, IoResult}; use libc::{mod, c_int, c_char, c_void}; -use path::{Path, GenericPath, BytesContainer}; -use ptr::{mod, RawPtr}; +use path::BytesContainer; +use ptr; use sync::atomic::{AtomicInt, 
INIT_ATOMIC_INT, SeqCst}; use sys::fs::FileDesc; use os; @@ -189,7 +189,7 @@ pub fn load_self() -> Option> { if sz == 0 { return None; } let mut v: Vec = Vec::with_capacity(sz as uint); let err = sysctl(mib.as_mut_ptr(), mib.len() as ::libc::c_uint, - v.as_mut_ptr() as *mut c_void, &mut sz, + v.as_mut_ptr() as *mut libc::c_void, &mut sz, ptr::null_mut(), 0u as libc::size_t); if err != 0 { return None; } if sz == 0 { return None; } diff --git a/src/libstd/sys/unix/pipe.rs b/src/libstd/sys/unix/pipe.rs index 26fd410a7a9b6..348b7cfad334e 100644 --- a/src/libstd/sys/unix/pipe.rs +++ b/src/libstd/sys/unix/pipe.rs @@ -19,6 +19,7 @@ use prelude::*; use sys::{mod, timer, retry, c, set_nonblocking, wouldblock}; use sys::fs::{fd_t, FileDesc}; use sys_common::net::*; +use sys_common::net::SocketStatus::*; use sys_common::{eof, mkerr_libc}; fn unix_socket(ty: libc::c_int) -> IoResult { diff --git a/src/libstd/sys/unix/tcp.rs b/src/libstd/sys/unix/tcp.rs index 00643ac0a79f2..5c99ad1e0ce22 100644 --- a/src/libstd/sys/unix/tcp.rs +++ b/src/libstd/sys/unix/tcp.rs @@ -20,7 +20,8 @@ use sys::fs::FileDesc; use sys::{set_nonblocking, wouldblock}; use sys; use sys_common; -use sys_common::net::*; +use sys_common::net; +use sys_common::net::SocketStatus::Readable; pub use sys_common::net::TcpStream; @@ -34,17 +35,19 @@ pub struct TcpListener { impl TcpListener { pub fn bind(addr: ip::SocketAddr) -> IoResult { - let fd = try!(socket(addr, libc::SOCK_STREAM)); + let fd = try!(net::socket(addr, libc::SOCK_STREAM)); let ret = TcpListener { inner: FileDesc::new(fd, true) }; let mut storage = unsafe { mem::zeroed() }; - let len = addr_to_sockaddr(addr, &mut storage); + let len = net::addr_to_sockaddr(addr, &mut storage); let addrp = &storage as *const _ as *const libc::sockaddr; // On platforms with Berkeley-derived sockets, this allows // to quickly rebind a socket, without needing to wait for // the OS to clean up the previous one. - try!(setsockopt(fd, libc::SOL_SOCKET, libc::SO_REUSEADDR, 1 as libc::c_int)); + try!(net::setsockopt(fd, libc::SOL_SOCKET, + libc::SO_REUSEADDR, + 1 as libc::c_int)); match unsafe { libc::bind(fd, addrp, len) } { @@ -77,7 +80,7 @@ impl TcpListener { } pub fn socket_name(&mut self) -> IoResult { - sockname(self.fd(), libc::getsockname) + net::sockname(self.fd(), libc::getsockname) } } @@ -121,7 +124,7 @@ impl TcpAcceptor { -1 => return Err(last_net_error()), fd => return Ok(TcpStream::new(fd as sock_t)), } - try!(await(&[self.fd(), self.inner.reader.fd()], + try!(net::await(&[self.fd(), self.inner.reader.fd()], deadline, Readable)); } @@ -129,7 +132,7 @@ impl TcpAcceptor { } pub fn socket_name(&mut self) -> IoResult { - sockname(self.fd(), libc::getsockname) + net::sockname(self.fd(), libc::getsockname) } pub fn set_timeout(&mut self, timeout: Option) { diff --git a/src/libstd/sys/unix/timer.rs b/src/libstd/sys/unix/timer.rs index 79a6a871f8dd4..fe393b81e3d9a 100644 --- a/src/libstd/sys/unix/timer.rs +++ b/src/libstd/sys/unix/timer.rs @@ -46,7 +46,7 @@ //! //! Note that all time units in this file are in *milliseconds*. 
-pub use self::Req::*; +use self::Req::*; use libc; use mem; diff --git a/src/libstd/sys/windows/backtrace.rs b/src/libstd/sys/windows/backtrace.rs index f2f543dd9697f..42c8f7705e1fc 100644 --- a/src/libstd/sys/windows/backtrace.rs +++ b/src/libstd/sys/windows/backtrace.rs @@ -32,7 +32,7 @@ use path::Path; use result::Result::{Ok, Err}; use sync::{StaticMutex, MUTEX_INIT}; use slice::SliceExt; -use str::StrPrelude; +use str::StrExt; use dynamic_lib::DynamicLibrary; use sys_common::backtrace::*; diff --git a/src/libstd/sys/windows/condvar.rs b/src/libstd/sys/windows/condvar.rs index 3cabf3a63194c..7f9d669c44748 100644 --- a/src/libstd/sys/windows/condvar.rs +++ b/src/libstd/sys/windows/condvar.rs @@ -10,7 +10,6 @@ use cell::UnsafeCell; use libc::{mod, DWORD}; -use libc; use os; use sys::mutex::{mod, Mutex}; use sys::sync as ffi; diff --git a/src/libstd/sys/windows/fs.rs b/src/libstd/sys/windows/fs.rs index 0fb52c758d5db..15eddd569beec 100644 --- a/src/libstd/sys/windows/fs.rs +++ b/src/libstd/sys/windows/fs.rs @@ -23,13 +23,13 @@ use io; use prelude::*; use sys; +use sys::os; use sys_common::{keep_going, eof, mkerr_libc}; use io::{FilePermission, Write, UnstableFileStat, Open, FileAccess, FileMode}; -use io::{IoResult, IoError, FileStat, SeekStyle, Seek, Writer, Reader}; +use io::{IoResult, IoError, FileStat, SeekStyle}; use io::{Read, Truncate, SeekCur, SeekSet, ReadWrite, SeekEnd, Append}; -pub use path::WindowsPath as Path; pub type fd_t = libc::c_int; pub struct FileDesc { @@ -263,7 +263,7 @@ pub fn readdir(p: &Path) -> IoResult> { let mut more_files = 1 as libc::BOOL; while more_files != 0 { { - let filename = str::truncate_utf16_at_nul(&wfd.cFileName); + let filename = os::truncate_utf16_at_nul(&wfd.cFileName); match String::from_utf16(filename) { Some(filename) => paths.push(Path::new(filename)), None => { diff --git a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs index 2fbb9494c710b..e007b46b261b4 100644 --- a/src/libstd/sys/windows/os.rs +++ b/src/libstd/sys/windows/os.rs @@ -20,12 +20,10 @@ use io::{IoResult, IoError}; use libc::{c_int, c_char, c_void}; use libc; use os; -use path::{Path, GenericPath, BytesContainer}; -use ptr::{mod, RawPtr}; +use path::BytesContainer; +use ptr; use sync::atomic::{AtomicInt, INIT_ATOMIC_INT, SeqCst}; use sys::fs::FileDesc; -use option::Option; -use option::Option::{Some, None}; use slice; use os::TMPBUF_SZ; @@ -33,6 +31,16 @@ use libc::types::os::arch::extra::DWORD; const BUF_BYTES : uint = 2048u; +/// Return a slice of `v` ending at (and not including) the first NUL +/// (0). 
+pub fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] { + match v.iter().position(|c| *c == 0) { + // don't include the 0 + Some(i) => v[..i], + None => v + } +} + pub fn errno() -> uint { use libc::types::os::arch::extra::DWORD; @@ -89,7 +97,7 @@ pub fn error_string(errnum: i32) -> String { return format!("OS Error {} (FormatMessageW() returned error {})", errnum, fm_err); } - let msg = String::from_utf16(::str::truncate_utf16_at_nul(&buf)); + let msg = String::from_utf16(truncate_utf16_at_nul(&buf)); match msg { Some(msg) => format!("OS Error {}: {}", errnum, msg), None => format!("OS Error {} (FormatMessageW() returned invalid UTF-16)", errnum), @@ -160,7 +168,7 @@ pub fn getcwd() -> IoResult { } } - match String::from_utf16(::str::truncate_utf16_at_nul(&buf)) { + match String::from_utf16(truncate_utf16_at_nul(&buf)) { Some(ref cwd) => Ok(Path::new(cwd)), None => Err(IoError { kind: OtherIoError, @@ -271,7 +279,7 @@ pub fn load_self() -> Option> { unsafe { fill_utf16_buf_and_decode(|buf, sz| { libc::GetModuleFileNameW(0u as libc::DWORD, buf, sz) - }).map(|s| s.into_string().into_bytes()) + }).map(|s| s.to_string().into_bytes()) } } @@ -296,3 +304,30 @@ pub fn page_size() -> uint { return info.dwPageSize as uint; } } + +#[cfg(test)] +mod tests { + use super::truncate_utf16_at_nul; + + #[test] + fn test_truncate_utf16_at_nul() { + let v = []; + let b: &[u16] = &[]; + assert_eq!(truncate_utf16_at_nul(&v), b); + + let v = [0, 2, 3]; + assert_eq!(truncate_utf16_at_nul(&v), b); + + let v = [1, 0, 3]; + let b: &[u16] = &[1]; + assert_eq!(truncate_utf16_at_nul(&v), b); + + let v = [1, 2, 0]; + let b: &[u16] = &[1, 2]; + assert_eq!(truncate_utf16_at_nul(&v), b); + + let v = [1, 2, 3]; + let b: &[u16] = &[1, 2, 3]; + assert_eq!(truncate_utf16_at_nul(&v), b); + } +} diff --git a/src/libstd/sys/windows/process.rs b/src/libstd/sys/windows/process.rs index bbfd32ee76bfa..0c2c76077dd54 100644 --- a/src/libstd/sys/windows/process.rs +++ b/src/libstd/sys/windows/process.rs @@ -29,7 +29,6 @@ use sys_common::helper_thread::Helper; use sys_common::{AsInner, mkerr_libc, timeout}; use io::fs::PathExtensions; -use string::String; pub use sys_common::ProcessConfig; @@ -123,7 +122,7 @@ impl Process { use mem; use iter::{Iterator, IteratorExt}; - use str::StrPrelude; + use str::StrExt; if cfg.gid().is_some() || cfg.uid().is_some() { return Err(IoError { diff --git a/src/libstd/sys/windows/tcp.rs b/src/libstd/sys/windows/tcp.rs index b577372d2fc59..505e6137bf904 100644 --- a/src/libstd/sys/windows/tcp.rs +++ b/src/libstd/sys/windows/tcp.rs @@ -18,8 +18,7 @@ use super::{last_error, last_net_error, retry, sock_t}; use sync::{Arc, atomic}; use sys::fs::FileDesc; use sys::{mod, c, set_nonblocking, wouldblock, timer}; -use sys_common::{mod, timeout, eof}; -use sys_common::net::*; +use sys_common::{mod, timeout, eof, net}; pub use sys_common::net::TcpStream; @@ -54,11 +53,11 @@ impl TcpListener { pub fn bind(addr: ip::SocketAddr) -> IoResult { sys::init_net(); - let sock = try!(socket(addr, libc::SOCK_STREAM)); + let sock = try!(net::socket(addr, libc::SOCK_STREAM)); let ret = TcpListener { sock: sock }; let mut storage = unsafe { mem::zeroed() }; - let len = addr_to_sockaddr(addr, &mut storage); + let len = net::addr_to_sockaddr(addr, &mut storage); let addrp = &storage as *const _ as *const libc::sockaddr; match unsafe { libc::bind(sock, addrp, len) } { @@ -95,7 +94,7 @@ impl TcpListener { } pub fn socket_name(&mut self) -> IoResult { - sockname(self.socket(), libc::getsockname) + net::sockname(self.socket(), 
libc::getsockname) } } @@ -195,7 +194,7 @@ impl TcpAcceptor { } pub fn socket_name(&mut self) -> IoResult { - sockname(self.socket(), libc::getsockname) + net::sockname(self.socket(), libc::getsockname) } pub fn set_timeout(&mut self, timeout: Option) { diff --git a/src/libstd/sys/windows/timer.rs b/src/libstd/sys/windows/timer.rs index e2f9e2a9201c3..7e4dd768aa975 100644 --- a/src/libstd/sys/windows/timer.rs +++ b/src/libstd/sys/windows/timer.rs @@ -20,7 +20,7 @@ //! Other than that, the implementation is pretty straightforward in terms of //! the other two implementations of timers with nothing *that* new showing up. -pub use self::Req::*; +use self::Req::*; use libc; use ptr; diff --git a/src/libstd/sys/windows/tty.rs b/src/libstd/sys/windows/tty.rs index 51679bb2003fc..f793de5bb57ef 100644 --- a/src/libstd/sys/windows/tty.rs +++ b/src/libstd/sys/windows/tty.rs @@ -111,7 +111,7 @@ impl TTY { } pub fn write(&mut self, buf: &[u8]) -> IoResult<()> { - let utf16 = match from_utf8(buf) { + let utf16 = match from_utf8(buf).ok() { Some(utf8) => { utf8.utf16_units().collect::>() } diff --git a/src/libstd/thread.rs b/src/libstd/thread.rs index 8ef53a22aeb04..89773207347d5 100644 --- a/src/libstd/thread.rs +++ b/src/libstd/thread.rs @@ -124,13 +124,17 @@ //! //! * It can be implemented highly efficiently on many platforms. -use core::prelude::*; - use any::Any; use borrow::IntoCow; use boxed::Box; use cell::UnsafeCell; +use clone::Clone; +use kinds::Send; +use ops::{Drop, FnOnce}; +use option::Option::{mod, Some, None}; +use result::Result::{Err, Ok}; use sync::{Mutex, Condvar, Arc}; +use str::Str; use string::String; use rt::{mod, unwind}; use io::{Writer, stdio}; @@ -424,13 +428,11 @@ impl Drop for JoinGuard { #[cfg(test)] mod test { + use prelude::*; use any::{Any, AnyRefExt}; use boxed::BoxAny; - use prelude::*; - use result::Result::{Ok, Err}; use result; use std::io::{ChanReader, ChanWriter}; - use string::String; use thunk::Thunk; use super::{Thread, Builder}; diff --git a/src/libstd/thread_local/mod.rs b/src/libstd/thread_local/mod.rs index 4c33d1c418d96..04718dcc6ae3b 100644 --- a/src/libstd/thread_local/mod.rs +++ b/src/libstd/thread_local/mod.rs @@ -189,11 +189,12 @@ macro_rules! __thread_local_inner { } }; - #[cfg(not(any(target_os = "macos", target_os = "linux")))] + #[cfg(all(stage0, not(any(target_os = "macos", target_os = "linux"))))] const INIT: ::std::thread_local::KeyInner<$t> = { unsafe extern fn __destroy(ptr: *mut u8) { ::std::thread_local::destroy_value::<$t>(ptr); } + ::std::thread_local::KeyInner { inner: ::std::cell::UnsafeCell { value: $init }, os: ::std::thread_local::OsStaticKey { @@ -203,6 +204,21 @@ macro_rules! __thread_local_inner { } }; + #[cfg(all(not(stage0), not(any(target_os = "macos", target_os = "linux"))))] + const INIT: ::std::thread_local::KeyInner<$t> = { + unsafe extern fn __destroy(ptr: *mut u8) { + ::std::thread_local::destroy_value::<$t>(ptr); + } + + ::std::thread_local::KeyInner { + inner: ::std::cell::UnsafeCell { value: $init }, + os: ::std::thread_local::OsStaticKey { + inner: ::std::thread_local::OS_INIT_INNER, + dtor: ::std::option::Option::Some(__destroy as unsafe extern fn(*mut u8)), + }, + } + }; + INIT }); } @@ -323,6 +339,12 @@ mod imp { // *should* be the case that this loop always terminates because we // provide the guarantee that a TLS key cannot be set after it is // flagged for destruction. 
+ #[cfg(not(stage0))] + static DTORS: os::StaticKey = os::StaticKey { + inner: os::INIT_INNER, + dtor: Some(run_dtors as unsafe extern "C" fn(*mut u8)), + }; + #[cfg(stage0)] static DTORS: os::StaticKey = os::StaticKey { inner: os::INIT_INNER, dtor: Some(run_dtors), diff --git a/src/libstd/time/duration.rs b/src/libstd/time/duration.rs index 7cb14e8e4bc62..f7351c9580f2f 100644 --- a/src/libstd/time/duration.rs +++ b/src/libstd/time/duration.rs @@ -287,21 +287,6 @@ impl Neg for Duration { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Add for Duration { - fn add(&self, rhs: &Duration) -> Duration { - let mut secs = self.secs + rhs.secs; - let mut nanos = self.nanos + rhs.nanos; - if nanos >= NANOS_PER_SEC { - nanos -= NANOS_PER_SEC; - secs += 1; - } - Duration { secs: secs, nanos: nanos } - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Add for Duration { fn add(self, rhs: Duration) -> Duration { let mut secs = self.secs + rhs.secs; @@ -314,21 +299,6 @@ impl Add for Duration { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Sub for Duration { - fn sub(&self, rhs: &Duration) -> Duration { - let mut secs = self.secs - rhs.secs; - let mut nanos = self.nanos - rhs.nanos; - if nanos < 0 { - nanos += NANOS_PER_SEC; - secs -= 1; - } - Duration { secs: secs, nanos: nanos } - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Sub for Duration { fn sub(self, rhs: Duration) -> Duration { let mut secs = self.secs - rhs.secs; @@ -341,19 +311,6 @@ impl Sub for Duration { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Mul for Duration { - fn mul(&self, rhs: &i32) -> Duration { - // Multiply nanoseconds as i64, because it cannot overflow that way. - let total_nanos = self.nanos as i64 * *rhs as i64; - let (extra_secs, nanos) = div_mod_floor_64(total_nanos, NANOS_PER_SEC as i64); - let secs = self.secs * *rhs as i64 + extra_secs; - Duration { secs: secs, nanos: nanos as i32 } - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Mul for Duration { fn mul(self, rhs: i32) -> Duration { // Multiply nanoseconds as i64, because it cannot overflow that way. @@ -364,27 +321,6 @@ impl Mul for Duration { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Div for Duration { - fn div(&self, rhs: &i32) -> Duration { - let mut secs = self.secs / *rhs as i64; - let carry = self.secs - secs * *rhs as i64; - let extra_nanos = carry * NANOS_PER_SEC as i64 / *rhs as i64; - let mut nanos = self.nanos / *rhs + extra_nanos as i32; - if nanos >= NANOS_PER_SEC { - nanos -= NANOS_PER_SEC; - secs += 1; - } - if nanos < 0 { - nanos += NANOS_PER_SEC; - secs -= 1; - } - Duration { secs: secs, nanos: nanos } - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Div for Duration { fn div(self, rhs: i32) -> Duration { let mut secs = self.secs / rhs as i64; diff --git a/src/libstd/tuple.rs b/src/libstd/tuple.rs new file mode 100644 index 0000000000000..5cd60d6e153ea --- /dev/null +++ b/src/libstd/tuple.rs @@ -0,0 +1,66 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Operations on tuples +//! +//! 
To access a single element of a tuple one can use the following +//! methods: +//! +//! * `valN` - returns a value of _N_-th element +//! * `refN` - returns a reference to _N_-th element +//! * `mutN` - returns a mutable reference to _N_-th element +//! +//! Indexing starts from zero, so `val0` returns first value, `val1` +//! returns second value, and so on. In general, a tuple with _S_ +//! elements provides aforementioned methods suffixed with numbers +//! from `0` to `S-1`. Traits which contain these methods are +//! implemented for tuples with up to 12 elements. +//! +//! If every type inside a tuple implements one of the following +//! traits, then a tuple itself also implements it. +//! +//! * `Clone` +//! * `PartialEq` +//! * `Eq` +//! * `PartialOrd` +//! * `Ord` +//! * `Default` +//! +//! # Examples +//! +//! Using methods: +//! +//! ``` +//! #[allow(deprecated)] +//! # fn main() { +//! let pair = ("pi", 3.14f64); +//! assert_eq!(pair.val0(), "pi"); +//! assert_eq!(pair.val1(), 3.14f64); +//! # } +//! ``` +//! +//! Using traits implemented for tuples: +//! +//! ``` +//! use std::default::Default; +//! +//! let a = (1i, 2i); +//! let b = (3i, 4i); +//! assert!(a != b); +//! +//! let c = b.clone(); +//! assert!(b == c); +//! +//! let d : (u32, f32) = Default::default(); +//! assert_eq!(d, (0u32, 0.0f32)); +//! ``` + +#![doc(primitive = "tuple")] +#![stable] diff --git a/src/libcore/tuple/unit.rs b/src/libstd/unit.rs similarity index 91% rename from src/libcore/tuple/unit.rs rename to src/libstd/unit.rs index 7f89f0e5ae399..012b175b031f1 100644 --- a/src/libcore/tuple/unit.rs +++ b/src/libstd/unit.rs @@ -9,8 +9,7 @@ // except according to those terms. #![doc(primitive = "unit")] -#![unstable = "this module is purely for documentation and it will likely be \ - removed from the public api"] +#![stable] //! The `()` type, sometimes called "unit" or "nil". //! diff --git a/src/libsyntax/abi.rs b/src/libsyntax/abi.rs index 70bad90aea1c0..b1599cb807d01 100644 --- a/src/libsyntax/abi.rs +++ b/src/libsyntax/abi.rs @@ -26,7 +26,7 @@ pub enum Os { OsDragonfly, } -#[deriving(Copy, PartialEq, Eq, Hash, Encodable, Decodable, Clone)] +#[deriving(PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Clone, Copy)] pub enum Abi { // NB: This ordering MUST match the AbiDatas array below. // (This is ensured by the test indices_are_correct().) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index be8f32bc4d5d8..9d4bf77d4a5d3 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -32,7 +32,6 @@ pub use self::LitIntType::*; pub use self::LocalSource::*; pub use self::Mac_::*; pub use self::MacStmtStyle::*; -pub use self::MatchSource::*; pub use self::MetaItem_::*; pub use self::Method_::*; pub use self::Mutability::*; @@ -158,7 +157,8 @@ pub const ILLEGAL_CTXT : SyntaxContext = 1; /// A name is a part of an identifier, representing a string or gensym. It's /// the result of interning. 
-#[deriving(Copy, Eq, Ord, PartialEq, PartialOrd, Hash, Encodable, Decodable, Clone)] +#[deriving(Eq, Ord, PartialEq, PartialOrd, Hash, + RustcEncodable, RustcDecodable, Clone, Copy)] pub struct Name(pub u32); impl Name { @@ -188,23 +188,24 @@ impl, E> Encodable for Ident { } } -impl, E> Decodable for Ident { +impl, E> Decodable for Ident { fn decode(d: &mut D) -> Result { - Ok(str_to_ident(try!(d.read_str()).as_slice())) + Ok(str_to_ident(try!(d.read_str())[])) } } /// Function name (not all functions have names) pub type FnIdent = Option; -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, + Show, Copy)] pub struct Lifetime { pub id: NodeId, pub span: Span, pub name: Name } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct LifetimeDef { pub lifetime: Lifetime, pub bounds: Vec @@ -213,7 +214,7 @@ pub struct LifetimeDef { /// A "Path" is essentially Rust's notion of a name; for instance: /// std::cmp::PartialEq . It's represented as a sequence of identifiers, /// along with a bunch of supporting information. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Path { pub span: Span, /// A `::foo` path, is relative to the crate root rather than current @@ -225,7 +226,7 @@ pub struct Path { /// A segment of a path: an identifier, an optional lifetime, and a set of /// types. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct PathSegment { /// The identifier portion of this path segment. pub identifier: Ident, @@ -238,7 +239,7 @@ pub struct PathSegment { pub parameters: PathParameters, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum PathParameters { AngleBracketedParameters(AngleBracketedParameterData), ParenthesizedParameters(ParenthesizedParameterData), @@ -316,7 +317,7 @@ impl PathParameters { } /// A path like `Foo<'a, T>` -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct AngleBracketedParameterData { /// The lifetime parameters for this path segment. pub lifetimes: Vec, @@ -334,7 +335,7 @@ impl AngleBracketedParameterData { } /// A path like `Foo(A,B) -> C` -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct ParenthesizedParameterData { /// `(A,B)` pub inputs: Vec>, @@ -347,7 +348,8 @@ pub type CrateNum = u32; pub type NodeId = u32; -#[deriving(Clone, Copy, Eq, Ord, PartialOrd, PartialEq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, + RustcDecodable, Hash, Show, Copy)] pub struct DefId { pub krate: CrateNum, pub node: NodeId, @@ -367,7 +369,7 @@ pub const DUMMY_NODE_ID: NodeId = -1; /// typeck::collect::compute_bounds matches these against /// the "special" built-in traits (see middle::lang_items) and /// detects Copy, Send and Sync. 
-#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum TyParamBound { TraitTyParamBound(PolyTraitRef), RegionTyParamBound(Lifetime) @@ -375,7 +377,7 @@ pub enum TyParamBound { pub type TyParamBounds = OwnedSlice; -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct TyParam { pub ident: Ident, pub id: NodeId, @@ -387,7 +389,7 @@ pub struct TyParam { /// Represents lifetimes and type parameters attached to a declaration /// of a function, enum, trait, etc. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Generics { pub lifetimes: Vec, pub ty_params: OwnedSlice, @@ -406,27 +408,34 @@ impl Generics { } } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct WhereClause { pub id: NodeId, pub predicates: Vec, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum WherePredicate { BoundPredicate(WhereBoundPredicate), + RegionPredicate(WhereRegionPredicate), EqPredicate(WhereEqPredicate) } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct WhereBoundPredicate { - pub id: NodeId, pub span: Span, - pub ident: Ident, + pub bounded_ty: P, pub bounds: OwnedSlice, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] +pub struct WhereRegionPredicate { + pub span: Span, + pub lifetime: Lifetime, + pub bounds: Vec, +} + +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct WhereEqPredicate { pub id: NodeId, pub span: Span, @@ -438,7 +447,7 @@ pub struct WhereEqPredicate { /// used to drive conditional compilation pub type CrateConfig = Vec> ; -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Crate { pub module: Mod, pub attrs: Vec, @@ -449,7 +458,7 @@ pub struct Crate { pub type MetaItem = Spanned; -#[deriving(Clone, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum MetaItem_ { MetaWord(InternedString), MetaList(InternedString, Vec>), @@ -481,7 +490,7 @@ impl PartialEq for MetaItem_ { } } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Block { pub view_items: Vec, pub stmts: Vec>, @@ -491,27 +500,27 @@ pub struct Block { pub span: Span, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Pat { pub id: NodeId, pub node: Pat_, pub span: Span, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct FieldPat { pub ident: Ident, pub pat: P, pub is_shorthand: bool, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, 
PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum BindingMode { BindByRef(Mutability), BindByValue(Mutability), } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum PatWildKind { /// Represents the wildcard pattern `_` PatWildSingle, @@ -520,7 +529,7 @@ pub enum PatWildKind { PatWildMulti, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Pat_ { /// Represents a wildcard pattern (either `_` or `..`) PatWild(PatWildKind), @@ -549,13 +558,13 @@ pub enum Pat_ { PatMac(Mac), } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum Mutability { MutMutable, MutImmutable, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum BinOp { BiAdd, BiSub, @@ -577,7 +586,7 @@ pub enum BinOp { BiGt, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum UnOp { UnUniq, UnDeref, @@ -587,7 +596,7 @@ pub enum UnOp { pub type Stmt = Spanned; -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Stmt_ { /// Could be an item or a local (let) binding: StmtDecl(P, NodeId), @@ -601,7 +610,7 @@ pub enum Stmt_ { StmtMac(Mac, MacStmtStyle), } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum MacStmtStyle { /// The macro statement had a trailing semicolon, e.g. `foo! { ... };` /// `foo!(...);`, `foo![...];` @@ -616,7 +625,7 @@ pub enum MacStmtStyle { /// Where a local declaration came from: either a true `let ... = /// ...;`, or one desugared from the pattern of a for loop. -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum LocalSource { LocalLet, LocalFor, @@ -625,7 +634,7 @@ pub enum LocalSource { // FIXME (pending discussion of #1697, #2178...): local should really be // a refinement on pat. 
/// Local represents a `let` statement, e.g., `let : = ;` -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Local { pub ty: P, pub pat: P, @@ -637,7 +646,7 @@ pub struct Local { pub type Decl = Spanned; -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Decl_ { /// A local (let) binding: DeclLocal(P), @@ -646,7 +655,7 @@ pub enum Decl_ { } /// represents one arm of a 'match' -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Arm { pub attrs: Vec, pub pats: Vec>, @@ -654,7 +663,7 @@ pub struct Arm { pub body: P, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Field { pub ident: SpannedIdent, pub expr: P, @@ -663,26 +672,26 @@ pub struct Field { pub type SpannedIdent = Spanned; -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum BlockCheckMode { DefaultBlock, UnsafeBlock(UnsafeSource), } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum UnsafeSource { CompilerGenerated, UserProvided, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Expr { pub id: NodeId, pub node: Expr_, pub span: Span, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Expr_ { /// First expr is the place; second expr is the value. 
ExprBox(Option>, P), @@ -744,28 +753,28 @@ pub enum Expr_ { /// as SomeTrait>::SomeAssociatedItem /// ^~~~~ ^~~~~~~~~ ^~~~~~~~~~~~~~~~~~ /// self_type trait_name item_name -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct QPath { pub self_type: P, pub trait_ref: P, pub item_name: Ident, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum MatchSource { - MatchNormal, - MatchIfLetDesugar, - MatchWhileLetDesugar, + Normal, + IfLetDesugar { contains_else_clause: bool }, + WhileLetDesugar, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum CaptureClause { CaptureByValue, CaptureByRef, } /// A delimited sequence of token trees -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Delimited { /// The type of delimiter pub delim: token::DelimToken, @@ -800,7 +809,7 @@ impl Delimited { } /// A sequence of token treesee -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct SequenceRepetition { /// The sequence of token trees pub tts: Vec, @@ -814,7 +823,7 @@ pub struct SequenceRepetition { /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) /// for token sequences. -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum KleeneOp { ZeroOrMore, OneOrMore, @@ -832,7 +841,7 @@ pub enum KleeneOp { /// /// The RHS of an MBE macro is the only place `SubstNt`s are substituted. /// Nothing special happens to misnamed or misplaced `SubstNt`s. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] #[doc="For macro invocations; parsing is delegated to the macro"] pub enum TokenTree { /// A single token @@ -922,14 +931,14 @@ pub type Mac = Spanned; /// is being invoked, and the vector of token-trees contains the source /// of the macro invocation. /// There's only one flavor, now, so this could presumably be simplified. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Mac_ { // NB: the additional ident for a macro_rules-style macro is actually // stored in the enclosing item. Oog. 
MacInvocTT(Path, Vec , SyntaxContext), // new macro-invocation } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum StrStyle { CookedStr, RawStr(uint) @@ -937,7 +946,7 @@ pub enum StrStyle { pub type Lit = Spanned; -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum Sign { Minus, Plus @@ -953,7 +962,7 @@ impl Sign where T: Int { } } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum LitIntType { SignedIntLit(IntTy, Sign), UnsignedIntLit(UintTy), @@ -970,7 +979,7 @@ impl LitIntType { } } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Lit_ { LitStr(InternedString, StrStyle), LitBinary(Rc >), @@ -984,13 +993,13 @@ pub enum Lit_ { // NB: If you change this, you'll probably want to change the corresponding // type structure in middle/ty.rs as well. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct MutTy { pub ty: P, pub mutbl: Mutability, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct TypeField { pub ident: Ident, pub mt: MutTy, @@ -999,7 +1008,7 @@ pub struct TypeField { /// Represents a required method in a trait declaration, /// one without a default implementation -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct TypeMethod { pub ident: Ident, pub attrs: Vec, @@ -1017,26 +1026,26 @@ pub struct TypeMethod { /// a default implementation A trait method is either required (meaning it /// doesn't have an implementation, just a signature) or provided (meaning it /// has a default implementation). 
-#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum TraitItem { RequiredMethod(TypeMethod), ProvidedMethod(P), TypeTraitItem(P), } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum ImplItem { MethodImplItem(P), TypeImplItem(P), } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct AssociatedType { pub attrs: Vec, pub ty_param: TyParam, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Typedef { pub id: NodeId, pub span: Span, @@ -1046,7 +1055,7 @@ pub struct Typedef { pub typ: P, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum IntTy { TyI, TyI8, @@ -1071,7 +1080,7 @@ impl IntTy { } } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum UintTy { TyU, TyU8, @@ -1096,7 +1105,7 @@ impl fmt::Show for UintTy { } } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum FloatTy { TyF32, TyF64, @@ -1117,7 +1126,7 @@ impl FloatTy { } // Bind a type to an associated type: `A=Foo`. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct TypeBinding { pub id: NodeId, pub ident: Ident, @@ -1127,7 +1136,7 @@ pub struct TypeBinding { // NB PartialEq method appears below. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Ty { pub id: NodeId, pub node: Ty_, @@ -1135,7 +1144,7 @@ pub struct Ty { } /// Not represented directly in the AST, referred to by name through a ty_path. 
-#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum PrimTy { TyInt(IntTy), TyUint(UintTy), @@ -1145,7 +1154,7 @@ pub enum PrimTy { TyChar } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum Onceness { Once, Many @@ -1161,7 +1170,7 @@ impl fmt::Show for Onceness { } /// Represents the type of a closure -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct ClosureTy { pub lifetimes: Vec, pub unsafety: Unsafety, @@ -1170,7 +1179,7 @@ pub struct ClosureTy { pub bounds: TyParamBounds, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct BareFnTy { pub unsafety: Unsafety, pub abi: Abi, @@ -1178,7 +1187,7 @@ pub struct BareFnTy { pub decl: P } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] /// The different kinds of types recognized by the compiler pub enum Ty_ { TyVec(P), @@ -1213,13 +1222,13 @@ pub enum Ty_ { TyInfer, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum AsmDialect { AsmAtt, AsmIntel } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct InlineAsm { pub asm: InternedString, pub asm_str_style: StrStyle, @@ -1233,7 +1242,7 @@ pub struct InlineAsm { } /// represents an argument in a function header -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Arg { pub ty: P, pub pat: P, @@ -1261,14 +1270,14 @@ impl Arg { } /// represents the header (not the body) of a function declaration -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct FnDecl { pub inputs: Vec, pub output: FunctionRetTy, pub variadic: bool } -#[deriving(Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash)] +#[deriving(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum Unsafety { Unsafe, Normal, @@ -1283,7 +1292,7 @@ impl fmt::Show for Unsafety { } } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum FunctionRetTy { /// Functions with return type ! that always /// raise an error or exit (i.e. 
never return to the caller) @@ -1302,7 +1311,7 @@ impl FunctionRetTy { } /// Represents the kind of 'self' associated with a method -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum ExplicitSelf_ { /// No self SelfStatic, @@ -1316,7 +1325,7 @@ pub enum ExplicitSelf_ { pub type ExplicitSelf = Spanned; -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Method { pub attrs: Vec, pub id: NodeId, @@ -1324,7 +1333,7 @@ pub struct Method { pub node: Method_, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Method_ { /// Represents a method declaration MethDecl(Ident, @@ -1339,7 +1348,7 @@ pub enum Method_ { MethMac(Mac), } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Mod { /// A span from the first token past `{` to the last token until `}`. /// For `mod foo;`, the inner span ranges from the first token @@ -1349,31 +1358,31 @@ pub struct Mod { pub items: Vec>, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct ForeignMod { pub abi: Abi, pub view_items: Vec, pub items: Vec>, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct VariantArg { pub ty: P, pub id: NodeId, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum VariantKind { TupleVariantKind(Vec), StructVariantKind(P), } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct EnumDef { pub variants: Vec>, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Variant_ { pub name: Ident, pub attrs: Vec, @@ -1385,7 +1394,7 @@ pub struct Variant_ { pub type Variant = Spanned; -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum PathListItem_ { PathListIdent { name: Ident, id: NodeId }, PathListMod { id: NodeId } @@ -1403,7 +1412,7 @@ pub type PathListItem = Spanned; pub type ViewPath = Spanned; -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum ViewPath_ { /// `foo::bar::baz as quux` @@ -1420,7 +1429,7 @@ pub enum ViewPath_ { ViewPathList(Path, Vec , NodeId) } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct ViewItem { pub node: ViewItem_, pub attrs: Vec, @@ -1428,7 +1437,7 @@ pub struct ViewItem { pub span: Span, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum ViewItem_ { /// Ident: name used to refer to this crate in the code /// optional 
(InternedString,StrStyle): if present, this is a location @@ -1444,17 +1453,17 @@ pub type Attribute = Spanned; /// Distinguishes between Attributes that decorate items and Attributes that /// are contained as statements within items. These two cases need to be /// distinguished for pretty-printing. -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum AttrStyle { AttrOuter, AttrInner, } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub struct AttrId(pub uint); /// Doc-comments are promoted to attributes that have is_sugared_doc = true -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Attribute_ { pub id: AttrId, pub style: AttrStyle, @@ -1467,13 +1476,13 @@ pub struct Attribute_ { /// that the ref_id is for. The impl_id maps to the "self type" of this impl. /// If this impl is an ItemImpl, the impl_id is redundant (it could be the /// same as the impl's node id). -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct TraitRef { pub path: Path, pub ref_id: NodeId, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct PolyTraitRef { /// The `'a` in `<'a> Foo<&'a T>` pub bound_lifetimes: Vec, @@ -1482,7 +1491,7 @@ pub struct PolyTraitRef { pub trait_ref: TraitRef } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum Visibility { Public, Inherited, @@ -1497,7 +1506,7 @@ impl Visibility { } } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct StructField_ { pub kind: StructFieldKind, pub id: NodeId, @@ -1516,7 +1525,7 @@ impl StructField_ { pub type StructField = Spanned; -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum StructFieldKind { NamedField(Ident, Visibility), /// Element of a tuple-like struct @@ -1532,7 +1541,7 @@ impl StructFieldKind { } } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct StructDef { /// Fields, not including ctor pub fields: Vec, @@ -1545,7 +1554,7 @@ pub struct StructDef { FIXME (#3300): Should allow items to be anonymous. Right now we just use dummy names for anon items. 
*/ -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct Item { pub ident: Ident, pub attrs: Vec, @@ -1555,7 +1564,7 @@ pub struct Item { pub span: Span, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum Item_ { ItemStatic(P, Mutability, P), ItemConst(P, P), @@ -1599,7 +1608,7 @@ impl Item_ { } } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub struct ForeignItem { pub ident: Ident, pub attrs: Vec, @@ -1609,7 +1618,7 @@ pub struct ForeignItem { pub vis: Visibility, } -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum ForeignItem_ { ForeignItemFn(P, Generics), ForeignItemStatic(P, /* is_mutbl */ bool), @@ -1624,7 +1633,7 @@ impl ForeignItem_ { } } -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum UnboxedClosureKind { FnUnboxedClosureKind, FnMutUnboxedClosureKind, @@ -1634,7 +1643,7 @@ pub enum UnboxedClosureKind { /// The data we save and restore about an inlined item or method. This is not /// part of the AST that we parse from a file, but it becomes part of the tree /// that we trans. -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)] pub enum InlinedItem { IIItem(P), IITraitItem(DefId /* impl id */, TraitItem), diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index a95c9e199060b..9b42a8f754069 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -73,9 +73,9 @@ impl<'a> Iterator for LinkedPath<'a> { } } -// HACK(eddyb) move this into libstd (value wrapper for slice::Items). +// HACK(eddyb) move this into libstd (value wrapper for slice::Iter). 
#[deriving(Clone)] -pub struct Values<'a, T:'a>(pub slice::Items<'a, T>); +pub struct Values<'a, T:'a>(pub slice::Iter<'a, T>); impl<'a, T: Copy> Iterator for Values<'a, T> { fn next(&mut self) -> Option { @@ -95,7 +95,7 @@ pub fn path_to_string>(path: PI) -> String { if !s.is_empty() { s.push_str("::"); } - s.push_str(e.as_slice()); + s.push_str(e[]); s }).to_string() } @@ -472,20 +472,20 @@ impl<'ast> Map<'ast> { F: FnOnce(Option<&[Attribute]>) -> T, { let attrs = match self.get(id) { - NodeItem(i) => Some(i.attrs.as_slice()), - NodeForeignItem(fi) => Some(fi.attrs.as_slice()), + NodeItem(i) => Some(i.attrs[]), + NodeForeignItem(fi) => Some(fi.attrs[]), NodeTraitItem(ref tm) => match **tm { - RequiredMethod(ref type_m) => Some(type_m.attrs.as_slice()), - ProvidedMethod(ref m) => Some(m.attrs.as_slice()), - TypeTraitItem(ref typ) => Some(typ.attrs.as_slice()), + RequiredMethod(ref type_m) => Some(type_m.attrs[]), + ProvidedMethod(ref m) => Some(m.attrs[]), + TypeTraitItem(ref typ) => Some(typ.attrs[]), }, NodeImplItem(ref ii) => { match **ii { - MethodImplItem(ref m) => Some(m.attrs.as_slice()), - TypeImplItem(ref t) => Some(t.attrs.as_slice()), + MethodImplItem(ref m) => Some(m.attrs[]), + TypeImplItem(ref t) => Some(t.attrs[]), } } - NodeVariant(ref v) => Some(v.node.attrs.as_slice()), + NodeVariant(ref v) => Some(v.node.attrs[]), // unit/tuple structs take the attributes straight from // the struct definition. // FIXME(eddyb) make this work again (requires access to the map). @@ -504,8 +504,8 @@ impl<'ast> Map<'ast> { /// the iterator will produce node id's for items with paths /// such as `foo::bar::quux`, `bar::quux`, `other::bar::quux`, and /// any other such items it can find in the map. - pub fn nodes_matching_suffix<'a, S:Str>(&'a self, parts: &'a [S]) - -> NodesMatchingSuffix<'a, 'ast, S> { + pub fn nodes_matching_suffix<'a>(&'a self, parts: &'a [String]) + -> NodesMatchingSuffix<'a, 'ast> { NodesMatchingSuffix { map: self, item_name: parts.last().unwrap(), @@ -565,14 +565,14 @@ impl<'ast> Map<'ast> { } } -pub struct NodesMatchingSuffix<'a, 'ast:'a, S:'a> { +pub struct NodesMatchingSuffix<'a, 'ast:'a> { map: &'a Map<'ast>, - item_name: &'a S, - in_which: &'a [S], + item_name: &'a String, + in_which: &'a [String], idx: NodeId, } -impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> { +impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { /// Returns true only if some suffix of the module path for parent /// matches `self.in_which`. /// @@ -586,7 +586,7 @@ impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> { None => return false, Some((node_id, name)) => (node_id, name), }; - if part.as_slice() != mod_name.as_str() { + if part[] != mod_name.as_str() { return false; } cursor = self.map.get_parent(mod_id); @@ -624,12 +624,12 @@ impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> { // We are looking at some node `n` with a given name and parent // id; do their names match what I am seeking? 
fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool { - name.as_str() == self.item_name.as_slice() && + name.as_str() == self.item_name[] && self.suffix_matches(parent_of_n) } } -impl<'a, 'ast, S:Str> Iterator for NodesMatchingSuffix<'a, 'ast, S> { +impl<'a, 'ast> Iterator for NodesMatchingSuffix<'a, 'ast> { fn next(&mut self) -> Option { loop { let idx = self.idx; @@ -1037,7 +1037,7 @@ impl<'a> NodePrinter for pprust::State<'a> { fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { let id_str = format!(" (id={})", id); - let id_str = if include_id { id_str.as_slice() } else { "" }; + let id_str = if include_id { id_str[] } else { "" }; match map.find(id) { Some(NodeItem(item)) => { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 02771809ae6a7..9196055267f6f 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -238,11 +238,11 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> Ident { match *trait_ref { Some(ref trait_ref) => { pretty.push('.'); - pretty.push_str(pprust::path_to_string(&trait_ref.path).as_slice()); + pretty.push_str(pprust::path_to_string(&trait_ref.path)[]); } None => {} } - token::gensym_ident(pretty.as_slice()) + token::gensym_ident(pretty[]) } pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod { @@ -343,7 +343,7 @@ pub fn empty_generics() -> Generics { // ______________________________________________________________________ // Enumerating the IDs which appear in an AST -#[deriving(Copy, Encodable, Decodable, Show)] +#[deriving(RustcEncodable, RustcDecodable, Show, Copy)] pub struct IdRange { pub min: NodeId, pub max: NodeId, @@ -700,7 +700,7 @@ pub fn pat_is_ident(pat: P) -> bool { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) - && (segments_name_eq(a.segments.as_slice(), b.segments.as_slice())) + && (segments_name_eq(a.segments[], b.segments[])) } // are two arrays of segments equal when compared unhygienically? 
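// The ast_map.rs and ast_util.rs hunks above replace explicit slicing calls with the
// bracket sugar of this era: `expr[]` for a full-range slice (formerly `.as_slice()`),
// `expr[a..b]` for `.slice(a, b)`, `expr[n..]` for `.slice_from(n)` and `expr[0..n]`
// for `.slice_to(n)`. An illustrative pairing in the pre-1.0 syntax this patch
// targets, with made-up names:
fn shout(name: &str) -> String {
    let greeting = format!("hello, {}", name);
    let all: &str = greeting[];        // was: greeting.as_slice()
    let head: &str = greeting[0..5];   // was: greeting.slice_to(5)
    format!("{} ({})", all, head)
}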
@@ -788,13 +788,13 @@ mod test { #[test] fn idents_name_eq_test() { assert!(segments_name_eq( [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] - .iter().map(ident_to_segment).collect::>().as_slice(), + .iter().map(ident_to_segment).collect::>()[], [Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}] - .iter().map(ident_to_segment).collect::>().as_slice())); + .iter().map(ident_to_segment).collect::>()[])); assert!(!segments_name_eq( [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] - .iter().map(ident_to_segment).collect::>().as_slice(), + .iter().map(ident_to_segment).collect::>()[], [Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}] - .iter().map(ident_to_segment).collect::>().as_slice())); + .iter().map(ident_to_segment).collect::>()[])); } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 127cc5ed51d11..df820b40cb6de 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -97,7 +97,7 @@ impl AttrMetaMethods for MetaItem { fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { match self.node { - MetaList(_, ref l) => Some(l.as_slice()), + MetaList(_, ref l) => Some(l[]), _ => None } } @@ -136,7 +136,7 @@ impl AttributeMethods for Attribute { let meta = mk_name_value_item_str( InternedString::new("doc"), token::intern_and_get_ident(strip_doc_comment_decoration( - comment.get()).as_slice())); + comment.get())[])); if self.node.style == ast::AttrOuter { f(&mk_attr_outer(self.node.id, meta)) } else { @@ -296,9 +296,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { } MetaList(ref n, ref items) if *n == "inline" => { mark_used(attr); - if contains_name(items.as_slice(), "always") { + if contains_name(items[], "always") { InlineAlways - } else if contains_name(items.as_slice(), "never") { + } else if contains_name(items[], "never") { InlineNever } else { InlineHint @@ -332,7 +332,7 @@ pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P], cfg: &ast::Me !cfg_matches(diagnostic, cfgs, &*mis[0]) } ast::MetaList(ref pred, _) => { - diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred).as_slice()); + diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred)[]); false }, ast::MetaWord(_) | ast::MetaNameValue(..) => contains(cfgs, cfg), @@ -340,14 +340,14 @@ pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P], cfg: &ast::Me } /// Represents the #[deprecated="foo"] and friends attributes. -#[deriving(Encodable,Decodable,Clone,Show)] +#[deriving(RustcEncodable,RustcDecodable,Clone,Show)] pub struct Stability { pub level: StabilityLevel, pub text: Option } /// The available stability levels. 
-#[deriving(Copy,Encodable,Decodable,PartialEq,PartialOrd,Clone,Show)] +#[deriving(RustcEncodable,RustcDecodable,PartialEq,PartialOrd,Clone,Show,Copy)] pub enum StabilityLevel { Deprecated, Experimental, @@ -396,8 +396,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P]) { if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, - format!("duplicate meta item `{}`", - name).as_slice()); + format!("duplicate meta item `{}`", name)[]); } } } @@ -464,7 +463,7 @@ fn int_type_of_word(s: &str) -> Option { } } -#[deriving(Copy, PartialEq, Show, Encodable, Decodable)] +#[deriving(PartialEq, Show, RustcEncodable, RustcDecodable, Copy)] pub enum ReprAttr { ReprAny, ReprInt(Span, IntType), @@ -483,7 +482,7 @@ impl ReprAttr { } } -#[deriving(Copy, Eq, Hash, PartialEq, Show, Encodable, Decodable)] +#[deriving(Eq, Hash, PartialEq, Show, RustcEncodable, RustcDecodable, Copy)] pub enum IntType { SignedInt(ast::IntTy), UnsignedInt(ast::UintTy) diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index b7c0678cf139c..6b9af29c60457 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -48,30 +48,12 @@ impl Pos for BytePos { fn to_uint(&self) -> uint { let BytePos(n) = *self; n as uint } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Add for BytePos { - fn add(&self, rhs: &BytePos) -> BytePos { - BytePos((self.to_uint() + rhs.to_uint()) as u32) - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Add for BytePos { fn add(self, rhs: BytePos) -> BytePos { BytePos((self.to_uint() + rhs.to_uint()) as u32) } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Sub for BytePos { - fn sub(&self, rhs: &BytePos) -> BytePos { - BytePos((self.to_uint() - rhs.to_uint()) as u32) - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Sub for BytePos { fn sub(self, rhs: BytePos) -> BytePos { BytePos((self.to_uint() - rhs.to_uint()) as u32) @@ -83,30 +65,12 @@ impl Pos for CharPos { fn to_uint(&self) -> uint { let CharPos(n) = *self; n } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Add for CharPos { - fn add(&self, rhs: &CharPos) -> CharPos { - CharPos(self.to_uint() + rhs.to_uint()) - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Add for CharPos { fn add(self, rhs: CharPos) -> CharPos { CharPos(self.to_uint() + rhs.to_uint()) } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Sub for CharPos { - fn sub(&self, rhs: &CharPos) -> CharPos { - CharPos(self.to_uint() - rhs.to_uint()) - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Sub for CharPos { fn sub(self, rhs: CharPos) -> CharPos { CharPos(self.to_uint() - rhs.to_uint()) @@ -128,7 +92,7 @@ pub struct Span { pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION }; -#[deriving(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub struct Spanned { pub node: T, pub span: Span, @@ -254,7 +218,7 @@ pub struct ExpnInfo { pub callee: NameAndSpan } -#[deriving(Copy, PartialEq, Eq, Clone, Show, Hash, Encodable, Decodable)] +#[deriving(PartialEq, Eq, Clone, Show, Hash, RustcEncodable, RustcDecodable, Copy)] pub struct ExpnId(u32); pub const NO_EXPANSION: ExpnId = ExpnId(-1); @@ -327,9 +291,9 @@ impl FileMap { lines.get(line_number).map(|&line| { let begin: BytePos = line - self.start_pos; let begin 
= begin.to_uint(); - let slice = self.src.slice_from(begin); + let slice = self.src[begin..]; match slice.find('\n') { - Some(e) => slice.slice_to(e), + Some(e) => slice[0..e], None => slice }.to_string() }) @@ -374,9 +338,9 @@ impl CodeMap { // FIXME #12884: no efficient/safe way to remove from the start of a string // and reuse the allocation. let mut src = if src.starts_with("\u{feff}") { - String::from_str(src.slice_from(3)) + String::from_str(src[3..]) } else { - String::from_str(src.as_slice()) + String::from_str(src[]) }; // Append '\n' in case it's not already there. @@ -463,8 +427,8 @@ impl CodeMap { if begin.fm.start_pos != end.fm.start_pos { None } else { - Some(begin.fm.src.slice(begin.pos.to_uint(), - end.pos.to_uint()).to_string()) + Some(begin.fm.src[begin.pos.to_uint().. + end.pos.to_uint()].to_string()) } } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 4d765f49acabd..88dfdf6e2d8f6 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -123,7 +123,7 @@ impl SpanHandler { panic!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { - self.span_bug(sp, format!("unimplemented {}", msg).as_slice()); + self.span_bug(sp, format!("unimplemented {}", msg)[]); } pub fn handler<'a>(&'a self) -> &'a Handler { &self.handler @@ -166,7 +166,7 @@ impl Handler { self.err_count.get()); } } - self.fatal(s.as_slice()); + self.fatal(s[]); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); @@ -182,7 +182,7 @@ impl Handler { panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! { - self.bug(format!("unimplemented {}", msg).as_slice()); + self.bug(format!("unimplemented {}", msg)[]); } pub fn emit(&self, cmsp: Option<(&codemap::CodeMap, Span)>, @@ -277,7 +277,7 @@ fn print_maybe_styled(w: &mut EmitterWriter, // to be miscolored. We assume this is rare enough that we don't // have to worry about it. 
if msg.ends_with("\n") { - try!(t.write_str(msg.slice_to(msg.len()-1))); + try!(t.write_str(msg[0..msg.len()-1])); try!(t.reset()); try!(t.write_str("\n")); } else { @@ -299,16 +299,16 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, } try!(print_maybe_styled(dst, - format!("{}: ", lvl.to_string()).as_slice(), + format!("{}: ", lvl.to_string())[], term::attr::ForegroundColor(lvl.color()))); try!(print_maybe_styled(dst, - format!("{}", msg).as_slice(), + format!("{}", msg)[], term::attr::Bold)); match code { Some(code) => { let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); - try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).as_slice(), style)); + try!(print_maybe_styled(dst, format!(" [{}]", code.clone())[], style)); } None => () } @@ -398,12 +398,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, // the span) let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; let ses = cm.span_to_string(span_end); - try!(print_diagnostic(dst, ses.as_slice(), lvl, msg, code)); + try!(print_diagnostic(dst, ses[], lvl, msg, code)); if rsp.is_full_span() { try!(custom_highlight_lines(dst, cm, sp, lvl, lines)); } } else { - try!(print_diagnostic(dst, ss.as_slice(), lvl, msg, code)); + try!(print_diagnostic(dst, ss[], lvl, msg, code)); if rsp.is_full_span() { try!(highlight_lines(dst, cm, sp, lvl, lines)); } @@ -413,9 +413,9 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, Some(code) => match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { Some(_) => { - try!(print_diagnostic(dst, ss.as_slice(), Help, + try!(print_diagnostic(dst, ss[], Help, format!("pass `--explain {}` to see a detailed \ - explanation", code).as_slice(), None)); + explanation", code)[], None)); } None => () }, @@ -432,7 +432,7 @@ fn highlight_lines(err: &mut EmitterWriter, let fm = &*lines.file; let mut elided = false; - let mut display_lines = lines.lines.as_slice(); + let mut display_lines = lines.lines[]; if display_lines.len() > MAX_LINES { display_lines = display_lines[0u..MAX_LINES]; elided = true; @@ -494,7 +494,7 @@ fn highlight_lines(err: &mut EmitterWriter, } } try!(print_maybe_styled(err, - format!("{}\n", s).as_slice(), + format!("{}\n", s)[], term::attr::ForegroundColor(lvl.color()))); } Ok(()) @@ -514,7 +514,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, -> io::IoResult<()> { let fm = &*lines.file; - let lines = lines.lines.as_slice(); + let lines = lines.lines[]; if lines.len() > MAX_LINES { if let Some(line) = fm.get_line(lines[0]) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, @@ -545,7 +545,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, s.push('^'); s.push('\n'); print_maybe_styled(w, - s.as_slice(), + s[], term::attr::ForegroundColor(lvl.color())) } @@ -560,12 +560,12 @@ fn print_macro_backtrace(w: &mut EmitterWriter, codemap::MacroAttribute => ("#[", "]"), codemap::MacroBang => ("", "!") }; - try!(print_diagnostic(w, ss.as_slice(), Note, + try!(print_diagnostic(w, ss[], Note, format!("in expansion of {}{}{}", pre, ei.callee.name, - post).as_slice(), None)); + post)[], None)); let ss = cm.span_to_string(ei.call_site); - try!(print_diagnostic(w, ss.as_slice(), Note, "expansion site", None)); + try!(print_diagnostic(w, ss[], Note, "expansion site", None)); Ok(Some(ei.call_site)) } None => Ok(None) @@ -578,6 +578,6 @@ pub fn expect(diag: &SpanHandler, opt: Option, msg: M) -> T where { match opt { Some(t) => t, - None => diag.handler().bug(msg().as_slice()), + 
None => diag.handler().bug(msg()[]), } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index bcce5538314b7..90fc28014e64f 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -58,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, Some(previous_span) => { ecx.span_warn(span, format!( "diagnostic code {} already used", token::get_ident(code).get() - ).as_slice()); + )[]); ecx.span_note(previous_span, "previous invocation"); }, None => () @@ -87,12 +87,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, if diagnostics.insert(code.name, description).is_some() { ecx.span_err(span, format!( "diagnostic code {} already registered", token::get_ident(*code).get() - ).as_slice()); + )[]); } }); let sym = Ident::new(token::gensym(( "__register_diagnostic_".to_string() + token::get_ident(*code).get() - ).as_slice())); + )[])); MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter()) } diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index b138811187ba9..b77b822a6b2a1 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -100,8 +100,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(('=', _)) => None, Some(('+', operand)) => { Some(token::intern_and_get_ident(format!( - "={}", - operand).as_slice())) + "={}", operand)[])) } _ => { cx.span_err(span, "output operand constraint lacks '=' or '+'"); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 3947a602809e6..d45871708dc20 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -50,14 +50,16 @@ pub trait ItemDecorator { push: |P|); } -impl ItemDecorator for fn(&mut ExtCtxt, Span, &ast::MetaItem, &ast::Item, |P|) { +impl ItemDecorator for F + where F : Fn(&mut ExtCtxt, Span, &ast::MetaItem, &ast::Item, |P|) +{ fn expand(&self, ecx: &mut ExtCtxt, sp: Span, meta_item: &ast::MetaItem, item: &ast::Item, push: |P|) { - self.clone()(ecx, sp, meta_item, item, push) + (*self)(ecx, sp, meta_item, item, push) } } @@ -70,14 +72,16 @@ pub trait ItemModifier { -> P; } -impl ItemModifier for fn(&mut ExtCtxt, Span, &ast::MetaItem, P) -> P { +impl ItemModifier for F + where F : Fn(&mut ExtCtxt, Span, &ast::MetaItem, P) -> P +{ fn expand(&self, ecx: &mut ExtCtxt, span: Span, meta_item: &ast::MetaItem, item: P) -> P { - self.clone()(ecx, span, meta_item, item) + (*self)(ecx, span, meta_item, item) } } @@ -93,13 +97,15 @@ pub trait TTMacroExpander { pub type MacroExpanderFn = for<'cx> fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box; -impl TTMacroExpander for MacroExpanderFn { +impl TTMacroExpander for F + where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box +{ fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, token_tree: &[ast::TokenTree]) -> Box { - self.clone()(ecx, span, token_tree) + (*self)(ecx, span, token_tree) } } @@ -115,14 +121,18 @@ pub trait IdentMacroExpander { pub type IdentMacroExpanderFn = for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec) -> Box; -impl IdentMacroExpander for IdentMacroExpanderFn { +impl IdentMacroExpander for F + where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident, + Vec) -> Box +{ fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, token_tree: Vec ) - -> Box { - self.clone()(cx, sp, ident, token_tree) + -> Box + { + (*self)(cx, sp, ident, token_tree) } } @@ -490,7 +500,7 @@ impl<'a> ExtCtxt<'a> { /// Returns a `Folder` for deeply expanding 
all macros in a AST node. pub fn expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> { - expand::MacroExpander { cx: self } + expand::MacroExpander::new(self) } pub fn new_parser_from_tts(&self, tts: &[ast::TokenTree]) @@ -549,7 +559,7 @@ impl<'a> ExtCtxt<'a> { pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } pub fn mod_path(&self) -> Vec { let mut v = Vec::new(); - v.push(token::str_to_ident(self.ecfg.crate_name.as_slice())); + v.push(token::str_to_ident(self.ecfg.crate_name[])); v.extend(self.mod_path.iter().map(|a| *a)); return v; } @@ -558,7 +568,7 @@ impl<'a> ExtCtxt<'a> { if self.recursion_count > self.ecfg.recursion_limit { self.span_fatal(ei.call_site, format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name).as_slice()); + ei.callee.name)[]); } let mut call_site = ei.call_site; @@ -669,7 +679,7 @@ pub fn check_zero_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], name: &str) { if tts.len() != 0 { - cx.span_err(sp, format!("{} takes no arguments", name).as_slice()); + cx.span_err(sp, format!("{} takes no arguments", name)[]); } } @@ -682,12 +692,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt, -> Option { let mut p = cx.new_parser_from_tts(tts); if p.token == token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name).as_slice()); + cx.span_err(sp, format!("{} takes 1 argument", name)[]); return None } let ret = cx.expander().fold_expr(p.parse_expr()); if p.token != token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name).as_slice()); + cx.span_err(sp, format!("{} takes 1 argument", name)[]); } expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| { s.get().to_string() diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index d35091f8ab0aa..77165168746b7 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -712,8 +712,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let loc = self.codemap().lookup_char_pos(span.lo); let expr_file = self.expr_str(span, token::intern_and_get_ident(loc.file - .name - .as_slice())); + .name[])); let expr_line = self.expr_uint(span, loc.line); let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line)); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); @@ -868,7 +867,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn expr_match(&self, span: Span, arg: P, arms: Vec) -> P { - self.expr(span, ast::ExprMatch(arg, arms, ast::MatchNormal)) + self.expr(span, ast::ExprMatch(arg, arms, ast::MatchSource::Normal)) } fn expr_if(&self, span: Span, cond: P, diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index e2867c2fbabfa..03dd08fdf7fe4 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, ast::LitInt(i, ast::UnsignedIntLit(_)) | ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => { - accumulator.push_str(format!("{}", i).as_slice()); + accumulator.push_str(format!("{}", i)[]); } ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => { - accumulator.push_str(format!("-{}", i).as_slice()); + accumulator.push_str(format!("-{}", i)[]); } ast::LitBool(b) => { - accumulator.push_str(format!("{}", b).as_slice()); + accumulator.push_str(format!("{}", b)[]); } ast::LitByte(..) | ast::LitBinary(..) 
=> { @@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(accumulator.as_slice()))) + token::intern_and_get_ident(accumulator[]))) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index aa18b1be31acc..2cf60d30a1b25 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(res_str.as_slice()); + let res = str_to_ident(res_str[]); let e = P(ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs index 3145b3bb1a4fe..c27a27fce6a90 100644 --- a/src/libsyntax/ext/deriving/bounds.rs +++ b/src/libsyntax/ext/deriving/bounds.rs @@ -31,8 +31,7 @@ pub fn expand_deriving_bound(cx: &mut ExtCtxt, ref tname => { cx.span_bug(span, format!("expected built-in trait name but \ - found {}", - *tname).as_slice()) + found {}", *tname)[]) } } }, diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index a34764221b3b6..eedec6f37c840 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -80,13 +80,11 @@ fn cs_clone( EnumNonMatchingCollapsed (..) => { cx.span_bug(trait_span, format!("non-matching enum variants in \ - `deriving({})`", - name).as_slice()) + `deriving({})`", name)[]) } StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, - format!("static method in `deriving({})`", - name).as_slice()) + format!("static method in `deriving({})`", name)[]) } } @@ -103,8 +101,7 @@ fn cs_clone( None => { cx.span_bug(trait_span, format!("unnamed field in normal struct in \ - `deriving({})`", - name).as_slice()) + `deriving({})`", name)[]) } }; cx.field_imm(field.span, ident, subcall(field)) diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index 0a8d59da89677..57dfbc0c6e8bc 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -21,24 +21,45 @@ use parse::token::InternedString; use parse::token; use ptr::P; +pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt, + span: Span, + mitem: &MetaItem, + item: &Item, + push: F) where + F: FnOnce(P), +{ + expand_deriving_decodable_imp(cx, span, mitem, item, push, "rustc_serialize") +} + pub fn expand_deriving_decodable(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Item, push: F) where F: FnOnce(P), +{ + expand_deriving_decodable_imp(cx, span, mitem, item, push, "serialize") +} + +fn expand_deriving_decodable_imp(cx: &mut ExtCtxt, + span: Span, + mitem: &MetaItem, + item: &Item, + push: F, + krate: &'static str) where + F: FnOnce(P), { let trait_def = TraitDef { span: span, attributes: Vec::new(), - path: Path::new_(vec!("serialize", "Decodable"), None, + path: Path::new_(vec!(krate, "Decodable"), None, vec!(box Literal(Path::new_local("__D")), box Literal(Path::new_local("__E"))), true), additional_bounds: Vec::new(), generics: LifetimeBounds { lifetimes: Vec::new(), bounds: vec!(("__D", None, vec!(Path::new_( - vec!("serialize", "Decoder"), None, + vec!(krate, "Decoder"), None, vec!(box Literal(Path::new_local("__E"))), true))), ("__E", None, vec!())) }, @@ -54,7 +75,7 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, box Literal(Path::new_local("__E"))), true)), attributes: Vec::new(), combine_substructure: combine_substructure(|a, b, c| { - 
decodable_substructure(a, b, c) + decodable_substructure(a, b, c, krate) }), }) }; @@ -63,9 +84,10 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, } fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> P { + substr: &Substructure, + krate: &str) -> P { let decoder = substr.nonself_args[0].clone(); - let recurse = vec!(cx.ident_of("serialize"), + let recurse = vec!(cx.ident_of(krate), cx.ident_of("Decodable"), cx.ident_of("decode")); // throw an underscore in front to suppress unused variable warnings @@ -174,7 +196,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, let fields = fields.iter().enumerate().map(|(i, &span)| { getarg(cx, span, token::intern_and_get_ident(format!("_field{}", - i).as_slice()), + i)[]), i) }).collect(); diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 30851ebeaaef3..8bd3df6232ce5 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -97,24 +97,45 @@ use ext::deriving::generic::ty::*; use parse::token; use ptr::P; +pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt, + span: Span, + mitem: &MetaItem, + item: &Item, + push: F) where + F: FnOnce(P), +{ + expand_deriving_encodable_imp(cx, span, mitem, item, push, "rustc_serialize") +} + pub fn expand_deriving_encodable(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Item, push: F) where F: FnOnce(P), +{ + expand_deriving_encodable_imp(cx, span, mitem, item, push, "serialize") +} + +fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, + span: Span, + mitem: &MetaItem, + item: &Item, + push: F, + krate: &'static str) where + F: FnOnce(P), { let trait_def = TraitDef { span: span, attributes: Vec::new(), - path: Path::new_(vec!("serialize", "Encodable"), None, + path: Path::new_(vec!(krate, "Encodable"), None, vec!(box Literal(Path::new_local("__S")), box Literal(Path::new_local("__E"))), true), additional_bounds: Vec::new(), generics: LifetimeBounds { lifetimes: Vec::new(), bounds: vec!(("__S", None, vec!(Path::new_( - vec!("serialize", "Encoder"), None, + vec!(krate, "Encoder"), None, vec!(box Literal(Path::new_local("__E"))), true))), ("__E", None, vec!())) }, @@ -162,8 +183,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let name = match name { Some(id) => token::get_ident(id), None => { - token::intern_and_get_ident(format!("_field{}", - i).as_slice()) + token::intern_and_get_ident(format!("_field{}", i)[]) } }; let enc = cx.expr_method_call(span, self_.clone(), diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index b31758e2d2a12..cf0201294ae54 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -426,12 +426,18 @@ impl<'a> TraitDef<'a> { match *clause { ast::WherePredicate::BoundPredicate(ref wb) => { ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate { - id: ast::DUMMY_NODE_ID, span: self.span, - ident: wb.ident, + bounded_ty: wb.bounded_ty.clone(), bounds: OwnedSlice::from_vec(wb.bounds.iter().map(|b| b.clone()).collect()) }) } + ast::WherePredicate::RegionPredicate(ref rb) => { + ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate { + span: self.span, + lifetime: rb.lifetime, + bounds: rb.bounds.iter().map(|b| b.clone()).collect() + }) + } ast::WherePredicate::EqPredicate(ref we) => { ast::WherePredicate::EqPredicate(ast::WhereEqPredicate { id: ast::DUMMY_NODE_ID, @@ -508,15 +514,15 @@ impl<'a> TraitDef<'a> { self, struct_def, 
type_ident, - self_args.as_slice(), - nonself_args.as_slice()) + self_args[], + nonself_args[]) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - self_args.as_slice(), - nonself_args.as_slice()) + self_args[], + nonself_args[]) }; method_def.create_method(cx, @@ -548,15 +554,15 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args.as_slice(), - nonself_args.as_slice()) + self_args[], + nonself_args[]) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, self_args, - nonself_args.as_slice()) + nonself_args[]) }; method_def.create_method(cx, @@ -643,7 +649,7 @@ impl<'a> MethodDef<'a> { for (i, ty) in self.args.iter().enumerate() { let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); - let ident = cx.ident_of(format!("__arg_{}", i).as_slice()); + let ident = cx.ident_of(format!("__arg_{}", i)[]); arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(trait_.span, ident); @@ -750,7 +756,7 @@ impl<'a> MethodDef<'a> { struct_path, struct_def, format!("__self_{}", - i).as_slice(), + i)[], ast::MutImmutable); patterns.push(pat); raw_fields.push(ident_expr); @@ -760,7 +766,7 @@ impl<'a> MethodDef<'a> { let fields = if raw_fields.len() > 0 { let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter()); let first_field = raw_fields.next().unwrap(); - let mut other_fields: Vec, P)>> + let mut other_fields: Vec, P)>> = raw_fields.collect(); first_field.map(|(span, opt_id, field)| { FieldInfo { @@ -906,22 +912,22 @@ impl<'a> MethodDef<'a> { .collect::>(); let self_arg_idents = self_arg_names.iter() - .map(|name|cx.ident_of(name.as_slice())) + .map(|name|cx.ident_of(name[])) .collect::>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a uint // corresponding to its variant index. let vi_idents: Vec = self_arg_names.iter() - .map(|name| { let vi_suffix = format!("{}_vi", name.as_slice()); - cx.ident_of(vi_suffix.as_slice()) }) + .map(|name| { let vi_suffix = format!("{}_vi", name[]); + cx.ident_of(vi_suffix[]) }) .collect::>(); // Builds, via callback to call_substructure_method, the // delegated expression that handles the catch-all case, // using `__variants_tuple` to drive logic if necessary. let catch_all_substructure = EnumNonMatchingCollapsed( - self_arg_idents, variants.as_slice(), vi_idents.as_slice()); + self_arg_idents, variants[], vi_idents[]); // These arms are of the form: // (Variant1, Variant1, ...) 
=> Body1 @@ -943,12 +949,12 @@ impl<'a> MethodDef<'a> { let mut subpats = Vec::with_capacity(self_arg_names.len()); let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); let first_self_pat_idents = { - let (p, idents) = mk_self_pat(cx, self_arg_names[0].as_slice()); + let (p, idents) = mk_self_pat(cx, self_arg_names[0][]); subpats.push(p); idents }; for self_arg_name in self_arg_names.tail().iter() { - let (p, idents) = mk_self_pat(cx, self_arg_name.as_slice()); + let (p, idents) = mk_self_pat(cx, self_arg_name[]); subpats.push(p); self_pats_idents.push(idents); } @@ -1004,7 +1010,7 @@ impl<'a> MethodDef<'a> { &**variant, field_tuples); let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args.as_slice(), nonself_args, + cx, trait_, type_ident, self_args[], nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr) @@ -1057,7 +1063,7 @@ impl<'a> MethodDef<'a> { } let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args.as_slice(), nonself_args, + cx, trait_, type_ident, self_args[], nonself_args, &catch_all_substructure); // Builds the expression: @@ -1261,7 +1267,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a struct with named and unnamed fields in `deriving`"); } }; - let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice()); + let ident = cx.ident_of(format!("{}_{}", prefix, i)[]); paths.push(codemap::Spanned{span: sp, node: ident}); let val = cx.expr( sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident))))); @@ -1307,7 +1313,7 @@ impl<'a> TraitDef<'a> { let mut ident_expr = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); - let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice()); + let ident = cx.ident_of(format!("{}_{}", prefix, i)[]); let path1 = codemap::Spanned{span: sp, node: ident}; paths.push(path1); let expr_path = cx.expr_path(cx.path_ident(sp, ident)); @@ -1350,7 +1356,7 @@ pub fn cs_fold(use_foldl: bool, field.span, old, field.self_.clone(), - field.other.as_slice()) + field.other[]) }) } else { all_fields.iter().rev().fold(base, |old, field| { @@ -1358,12 +1364,12 @@ pub fn cs_fold(use_foldl: bool, field.span, old, field.self_.clone(), - field.other.as_slice()) + field.other[]) }) } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (all_args.as_slice(), tuple), + enum_nonmatch_f(cx, trait_span, (all_args[], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `deriving`") @@ -1403,7 +1409,7 @@ pub fn cs_same_method(f: F, f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (all_self_args.as_slice(), tuple), + enum_nonmatch_f(cx, trait_span, (all_self_args[], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) 
=> { cx.span_bug(trait_span, "static function in `deriving`") diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index 839e99c81d1ab..edf29e670eb88 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -71,24 +71,22 @@ pub fn expand_meta_deriving(cx: &mut ExtCtxt, "Hash" => expand!(hash::expand_deriving_hash), "RustcEncodable" => { - expand!(encodable::expand_deriving_encodable) + expand!(encodable::expand_deriving_rustc_encodable) } "RustcDecodable" => { - expand!(decodable::expand_deriving_decodable) + expand!(decodable::expand_deriving_rustc_decodable) } "Encodable" => { - // NOTE: uncomment after a stage0 snap - // cx.span_warn(titem.span, - // "deriving(Encodable) is deprecated \ - // in favor of deriving(RustcEncodable)"); + cx.span_warn(titem.span, + "deriving(Encodable) is deprecated \ + in favor of deriving(RustcEncodable)"); expand!(encodable::expand_deriving_encodable) } "Decodable" => { - // NOTE: uncomment after a stage0 snap - // cx.span_warn(titem.span, - // "deriving(Decodable) is deprecated \ - // in favor of deriving(RustcDecodable)"); + cx.span_warn(titem.span, + "deriving(Decodable) is deprecated \ + in favor of deriving(RustcDecodable)"); expand!(decodable::expand_deriving_decodable) } @@ -115,7 +113,7 @@ pub fn expand_meta_deriving(cx: &mut ExtCtxt, cx.span_err(titem.span, format!("unknown `deriving` \ trait: `{}`", - *tname).as_slice()); + *tname)[]); } }; } diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index a68b521bbc9a2..19b45a1e61007 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -127,7 +127,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0].clone(); let meth = cx.ident_of("write_fmt"); - let s = token::intern_and_get_ident(format_string.as_slice()); + let s = token::intern_and_get_ident(format_string[]); let format_string = cx.expr_str(span, s); // phew, not our responsibility any more! 
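// The ext/base.rs hunks above stop implementing the expander traits only for bare
// `fn` pointer types and instead provide one blanket impl for any `F` bounded by the
// matching `Fn` trait, invoking it as `(*self)(..)` rather than `self.clone()(..)`.
// A compressed sketch of the same pattern; `Expander` and `expand_all` are
// illustrative names, not part of the patch:
pub trait Expander {
    fn expand(&self, input: uint) -> uint;
}

impl<F> Expander for F
    where F: Fn(uint) -> uint
{
    fn expand(&self, input: uint) -> uint {
        (*self)(input)
    }
}

// Usage: both closures and plain functions now satisfy `Expander`.
fn expand_all(e: &Expander, xs: &[uint]) -> Vec<uint> {
    xs.iter().map(|&x| e.expand(x)).collect()
}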
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 8c17b31f458d0..9fedc4a158e15 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT Some(v) => v }; - let e = match os::getenv(var.as_slice()) { + let e = match os::getenv(var[]) { None => { cx.expr_path(cx.path_all(sp, true, @@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT cx.ident_of("Some")), vec!(cx.expr_str(sp, token::intern_and_get_ident( - s.as_slice())))) + s[])))) } }; MacExpr::new(e) @@ -83,7 +83,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) None => { token::intern_and_get_ident(format!("environment variable `{}` \ not defined", - var).as_slice()) + var)[]) } Some(second) => { match expr_to_string(cx, second, "expected string literal") { @@ -106,7 +106,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, msg.get()); cx.expr_uint(sp, 0) } - Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.as_slice())) + Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s[])) }; MacExpr::new(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 20c8ff20b713b..f2b6f6bfe16b0 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -15,6 +15,7 @@ use ast::{ItemMac, MacStmtWithSemicolon, Mrk, Stmt, StmtDecl, StmtMac}; use ast::{StmtExpr, StmtSemi}; use ast::TokenTree; use ast; +use ast_util::path_to_ident; use ext::mtwt; use ext::build::AstBuilder; use attr; @@ -37,6 +38,30 @@ enum Either { Right(R) } +pub fn expand_type(t: P, + fld: &mut MacroExpander, + impl_ty: Option>) + -> P { + debug!("expanding type {} with impl_ty {}", t, impl_ty); + let t = match (t.node.clone(), impl_ty) { + // Expand uses of `Self` in impls to the concrete type. + (ast::Ty_::TyPath(ref path, _), Some(ref impl_ty)) => { + let path_as_ident = path_to_ident(path); + // Note unhygenic comparison here. I think this is correct, since + // even though `Self` is almost just a type parameter, the treatment + // for this expansion is as if it were a keyword. + if path_as_ident.is_some() && + path_as_ident.unwrap().name == token::special_idents::type_self.name { + impl_ty.clone() + } else { + t + } + } + _ => t + }; + fold::noop_fold_ty(t, fld) +} + pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { e.and_then(|ast::Expr {id, node, span}| match node { // expr_mac should really be expr_ext or something; it's the @@ -97,7 +122,7 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { // `match { ... }` let arms = vec![pat_arm, break_arm]; let match_expr = fld.cx.expr(span, - ast::ExprMatch(expr, arms, ast::MatchWhileLetDesugar)); + ast::ExprMatch(expr, arms, ast::MatchSource::WhileLetDesugar)); // `[opt_ident]: loop { ... 
}` let loop_block = fld.cx.block_expr(match_expr); @@ -158,6 +183,8 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { arms }; + let contains_else_clause = elseopt.is_some(); + // `_ => [ | ()]` let else_arm = { let pat_under = fld.cx.pat_wild(span); @@ -170,7 +197,11 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { arms.extend(else_if_arms.into_iter()); arms.push(else_arm); - let match_expr = fld.cx.expr(span, ast::ExprMatch(expr, arms, ast::MatchIfLetDesugar)); + let match_expr = fld.cx.expr(span, + ast::ExprMatch(expr, arms, + ast::MatchSource::IfLetDesugar { + contains_else_clause: contains_else_clause, + })); fld.fold_expr(match_expr) } @@ -262,7 +293,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("macro undefined: '{}!'", - extnamestr.get()).as_slice()); + extnamestr.get())[]); // let compilation continue None @@ -278,7 +309,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, }, }); let fm = fresh_mark(); - let marked_before = mark_tts(tts.as_slice(), fm); + let marked_before = mark_tts(tts[], fm); // The span that we pass to the expanders we want to // be the root of the call stack. That's the most @@ -289,7 +320,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, let opt_parsed = { let expanded = expandfun.expand(fld.cx, mac_span, - marked_before.as_slice()); + marked_before[]); parse_thunk(expanded) }; let parsed = match opt_parsed { @@ -298,8 +329,8 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("non-expression macro in expression position: {}", - extnamestr.get().as_slice() - ).as_slice()); + extnamestr.get()[] + )[]); return None; } }; @@ -309,7 +340,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("'{}' is not a tt-style macro", - extnamestr.get()).as_slice()); + extnamestr.get())[]); None } } @@ -414,7 +445,7 @@ pub fn expand_item(it: P, fld: &mut MacroExpander) if valid_ident { fld.cx.mod_push(it.ident); } - let macro_escape = contains_macro_escape(new_attrs.as_slice()); + let macro_escape = contains_macro_escape(new_attrs[]); let result = with_exts_frame!(fld.cx.syntax_env, macro_escape, noop_fold_item(it, fld)); @@ -522,7 +553,7 @@ pub fn expand_item_mac(it: P, fld: &mut MacroExpander) None => { fld.cx.span_err(path_span, format!("macro undefined: '{}!'", - extnamestr).as_slice()); + extnamestr)[]); // let compilation continue return SmallVector::zero(); } @@ -535,7 +566,7 @@ pub fn expand_item_mac(it: P, fld: &mut MacroExpander) format!("macro {}! expects no ident argument, \ given '{}'", extnamestr, - token::get_ident(it.ident)).as_slice()); + token::get_ident(it.ident))[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -547,14 +578,14 @@ pub fn expand_item_mac(it: P, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_before = mark_tts(tts.as_slice(), fm); - expander.expand(fld.cx, it.span, marked_before.as_slice()) + let marked_before = mark_tts(tts[], fm); + expander.expand(fld.cx, it.span, marked_before[]) } IdentTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, format!("macro {}! 
expects an ident argument", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -566,14 +597,14 @@ pub fn expand_item_mac(it: P, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_tts = mark_tts(tts.as_slice(), fm); + let marked_tts = mark_tts(tts[], fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } LetSyntaxTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, format!("macro {}! expects an ident argument", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -590,7 +621,7 @@ pub fn expand_item_mac(it: P, fld: &mut MacroExpander) _ => { fld.cx.span_err(it.span, format!("{}! is not legal in item position", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } } @@ -608,8 +639,8 @@ pub fn expand_item_mac(it: P, fld: &mut MacroExpander) // result of expanding a LetSyntaxTT, and thus doesn't // need to be marked. Not that it could be marked anyway. // create issue to recommend refactoring here? - fld.cx.syntax_env.insert(intern(name.as_slice()), ext); - if attr::contains_name(it.attrs.as_slice(), "macro_export") { + fld.cx.syntax_env.insert(intern(name[]), ext); + if attr::contains_name(it.attrs[], "macro_export") { fld.cx.exported_macros.push(it); } SmallVector::zero() @@ -623,7 +654,7 @@ pub fn expand_item_mac(it: P, fld: &mut MacroExpander) Right(None) => { fld.cx.span_err(path_span, format!("non-item macro in item position: {}", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return SmallVector::zero(); } }; @@ -872,7 +903,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { None => { fld.cx.span_err(pth.span, format!("macro undefined: '{}!'", - extnamestr).as_slice()); + extnamestr)[]); // let compilation continue return DummyResult::raw_pat(span); } @@ -889,11 +920,11 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { }); let fm = fresh_mark(); - let marked_before = mark_tts(tts.as_slice(), fm); + let marked_before = mark_tts(tts[], fm); let mac_span = fld.cx.original_span(); let expanded = match expander.expand(fld.cx, mac_span, - marked_before.as_slice()).make_pat() { + marked_before[]).make_pat() { Some(e) => e, None => { fld.cx.span_err( @@ -901,7 +932,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { format!( "non-pattern macro in pattern position: {}", extnamestr.get() - ).as_slice() + )[] ); return DummyResult::raw_pat(span); } @@ -913,7 +944,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { _ => { fld.cx.span_err(span, format!("{}! is not legal in pattern position", - extnamestr.get()).as_slice()); + extnamestr.get())[]); return DummyResult::raw_pat(span); } } @@ -1059,6 +1090,14 @@ fn expand_and_rename_fn_decl_and_block(fn_decl: P, block: P { pub cx: &'a mut ExtCtxt<'b>, + // The type of the impl currently being expanded. 
+ current_impl_type: Option>, +} + +impl<'a, 'b> MacroExpander<'a, 'b> { + pub fn new(cx: &'a mut ExtCtxt<'b>) -> MacroExpander<'a, 'b> { + MacroExpander { cx: cx, current_impl_type: None } + } } impl<'a, 'b> Folder for MacroExpander<'a, 'b> { @@ -1071,7 +1110,14 @@ impl<'a, 'b> Folder for MacroExpander<'a, 'b> { } fn fold_item(&mut self, item: P) -> SmallVector> { - expand_item(item, self) + let prev_type = self.current_impl_type.clone(); + if let ast::Item_::ItemImpl(_, _, _, ref ty, _) = item.node { + self.current_impl_type = Some(ty.clone()); + } + + let result = expand_item(item, self); + self.current_impl_type = prev_type; + result } fn fold_item_underscore(&mut self, item: ast::Item_) -> ast::Item_ { @@ -1094,6 +1140,11 @@ impl<'a, 'b> Folder for MacroExpander<'a, 'b> { expand_method(method, self) } + fn fold_ty(&mut self, t: P) -> P { + let impl_type = self.current_impl_type.clone(); + expand_type(t, self, impl_type) + } + fn new_span(&mut self, span: Span) -> Span { new_span(self.cx, span) } @@ -1138,13 +1189,10 @@ pub fn expand_crate(parse_sess: &parse::ParseSess, user_exts: Vec, c: Crate) -> Crate { let mut cx = ExtCtxt::new(parse_sess, c.config.clone(), cfg); - let mut expander = MacroExpander { - cx: &mut cx, - }; + let mut expander = MacroExpander::new(&mut cx); for ExportedMacros { crate_name, macros } in imported_macros.into_iter() { - let name = format!("<{} macros>", token::get_ident(crate_name)) - .into_string(); + let name = format!("<{} macros>", token::get_ident(crate_name)); for source in macros.into_iter() { let item = parse::parse_item_from_source_str(name.clone(), @@ -1189,7 +1237,7 @@ impl Folder for Marker { node: match node { MacInvocTT(path, tts, ctxt) => { MacInvocTT(self.fold_path(path), - self.fold_tts(tts.as_slice()), + self.fold_tts(tts[]), mtwt::apply_mark(self.mark, ctxt)) } }, @@ -1366,9 +1414,9 @@ mod test { let attr2 = make_dummy_attr ("bar"); let escape_attr = make_dummy_attr ("macro_escape"); let attrs1 = vec!(attr1.clone(), escape_attr, attr2.clone()); - assert_eq!(contains_macro_escape(attrs1.as_slice()),true); + assert_eq!(contains_macro_escape(attrs1[]),true); let attrs2 = vec!(attr1,attr2); - assert_eq!(contains_macro_escape(attrs2.as_slice()),false); + assert_eq!(contains_macro_escape(attrs2[]),false); } // make a MetaWord outer attribute with the given name @@ -1680,7 +1728,7 @@ foo_module!(); let string = ident.get(); "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = cxbinds.as_slice(); + let cxbinds: &[&ast::Ident] = cxbinds[]; let cxbind = match cxbinds { [b] => b, _ => panic!("expected just one binding for ext_cx") diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 5d595474e9c70..aad4045f00a52 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -136,7 +136,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, _ => { ecx.span_err(p.span, format!("expected ident for named argument, found `{}`", - p.this_token_to_string()).as_slice()); + p.this_token_to_string())[]); return (invocation, None); } }; @@ -149,7 +149,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, Some(prev) => { ecx.span_err(e.span, format!("duplicate argument named `{}`", - name).as_slice()); + name)[]); ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here"); continue } @@ -240,7 +240,7 @@ impl<'a, 'b> Context<'a, 'b> { let msg = format!("invalid reference to argument `{}` ({})", arg, self.describe_num_args()); - self.ecx.span_err(self.fmtsp, msg.as_slice()); + 
self.ecx.span_err(self.fmtsp, msg[]); return; } { @@ -260,7 +260,7 @@ impl<'a, 'b> Context<'a, 'b> { Some(e) => e.span, None => { let msg = format!("there is no argument named `{}`", name); - self.ecx.span_err(self.fmtsp, msg.as_slice()); + self.ecx.span_err(self.fmtsp, msg[]); return; } }; @@ -303,19 +303,19 @@ impl<'a, 'b> Context<'a, 'b> { format!("argument redeclared with type `{}` when \ it was previously `{}`", *ty, - *cur).as_slice()); + *cur)[]); } (&Known(ref cur), _) => { self.ecx.span_err(sp, format!("argument used to format with `{}` was \ attempted to not be used for formatting", - *cur).as_slice()); + *cur)[]); } (_, &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument previously used as a format \ argument attempted to be used as `{}`", - *ty).as_slice()); + *ty)[]); } (_, _) => { self.ecx.span_err(sp, "argument declared with multiple formats"); @@ -380,7 +380,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Translate the accumulated string literals to a literal expression fn trans_literal_string(&mut self) -> P { let sp = self.fmtsp; - let s = token::intern_and_get_ident(self.literal.as_slice()); + let s = token::intern_and_get_ident(self.literal[]); self.literal.clear(); self.ecx.expr_str(sp, s) } @@ -552,7 +552,7 @@ impl<'a, 'b> Context<'a, 'b> { None => continue // error already generated }; - let name = self.ecx.ident_of(format!("__arg{}", i).as_slice()); + let name = self.ecx.ident_of(format!("__arg{}", i)[]); pats.push(self.ecx.pat_ident(e.span, name)); locals.push(Context::format_arg(self.ecx, e.span, arg_ty, self.ecx.expr_ident(e.span, name))); @@ -569,7 +569,7 @@ impl<'a, 'b> Context<'a, 'b> { }; let lname = self.ecx.ident_of(format!("__arg{}", - *name).as_slice()); + *name)[]); pats.push(self.ecx.pat_ident(e.span, lname)); names[self.name_positions[*name]] = Some(Context::format_arg(self.ecx, e.span, arg_ty, @@ -652,8 +652,9 @@ impl<'a, 'b> Context<'a, 'b> { -> P { let trait_ = match *ty { Known(ref tyname) => { - match tyname.as_slice() { + match tyname[] { "" => "Show", + "?" => "Show", "e" => "LowerExp", "E" => "UpperExp", "o" => "Octal", @@ -664,7 +665,7 @@ impl<'a, 'b> Context<'a, 'b> { _ => { ecx.span_err(sp, format!("unknown format trait `{}`", - *tyname).as_slice()); + *tyname)[]); "Dummy" } } @@ -759,8 +760,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, match parser.errors.remove(0) { Some(error) => { cx.ecx.span_err(cx.fmtsp, - format!("invalid format string: {}", - error).as_slice()); + format!("invalid format string: {}", error)[]); return DummyResult::raw_expr(sp); } None => {} diff --git a/src/libsyntax/ext/mtwt.rs b/src/libsyntax/ext/mtwt.rs index ae979020bc7e5..6a296333fdb6a 100644 --- a/src/libsyntax/ext/mtwt.rs +++ b/src/libsyntax/ext/mtwt.rs @@ -21,7 +21,7 @@ use ast::{Ident, Mrk, Name, SyntaxContext}; use std::cell::RefCell; use std::collections::HashMap; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; /// The SCTable contains a table of SyntaxContext_'s. 
It /// represents a flattened tree structure, to avoid having @@ -39,7 +39,7 @@ pub struct SCTable { rename_memo: RefCell>, } -#[deriving(Copy, PartialEq, Encodable, Decodable, Hash, Show)] +#[deriving(PartialEq, RustcEncodable, RustcDecodable, Hash, Show, Copy)] pub enum SyntaxContext_ { EmptyCtxt, Mark (Mrk,SyntaxContext), diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index c7cb41e2ece21..368d4fa84476f 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -474,7 +474,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt, } fn ids_ext(strs: Vec ) -> Vec { - strs.iter().map(|str| str_to_ident((*str).as_slice())).collect() + strs.iter().map(|str| str_to_ident((*str)[])).collect() } fn id_ext(str: &str) -> ast::Ident { @@ -676,7 +676,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec> { for i in range(0, tt.len()) { seq.push(tt.get_tt(i)); } - mk_tts(cx, seq.as_slice()) + mk_tts(cx, seq[]) } ast::TtToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -765,7 +765,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); let mut vector = vec!(stmt_let_sp, stmt_let_tt); - vector.extend(mk_tts(cx, tts.as_slice()).into_iter()); + vector.extend(mk_tts(cx, tts[]).into_iter()); let block = cx.expr_block( cx.block_all(sp, Vec::new(), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 570231940aac6..7c2c5c1530c99 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = cx.original_span_in_file(); let loc = cx.codemap().lookup_char_pos(topmost.lo); - let filename = token::intern_and_get_ident(loc.file.name.as_slice()); + let filename = token::intern_and_get_ident(loc.file.name[]); base::MacExpr::new(cx.expr_str(topmost, filename)) } @@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); base::MacExpr::new(cx.expr_str(sp, - token::intern_and_get_ident(s.as_slice()))) + token::intern_and_get_ident(s[]))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) @@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) .connect("::"); base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(string.as_slice()))) + token::intern_and_get_ident(string[]))) } /// include! 
: parse the given file as an expr @@ -137,7 +137,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, format!("couldn't read {}: {}", file.display(), - e).as_slice()); + e)[]); return DummyResult::expr(sp); } Ok(bytes) => bytes, @@ -147,7 +147,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information let filename = file.display().to_string(); - let interned = token::intern_and_get_ident(src.as_slice()); + let interned = token::intern_and_get_ident(src[]); cx.codemap().new_filemap(filename, src); base::MacExpr::new(cx.expr_str(sp, interned)) @@ -155,7 +155,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Err(_) => { cx.span_err(sp, format!("{} wasn't a utf-8 file", - file.display()).as_slice()); + file.display())[]); return DummyResult::expr(sp); } } @@ -171,9 +171,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) match File::open(&file).read_to_end() { Err(e) => { cx.span_err(sp, - format!("couldn't read {}: {}", - file.display(), - e).as_slice()); + format!("couldn't read {}: {}", file.display(), e)[]); return DummyResult::expr(sp); } Ok(bytes) => { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 4785fe37293c0..73ef18b8449e0 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -98,7 +98,7 @@ use ptr::P; use std::mem; use std::rc::Rc; use std::collections::HashMap; -use std::collections::hash_map::{Vacant, Occupied}; +use std::collections::hash_map::Entry::{Vacant, Occupied}; // To avoid costly uniqueness checks, we require that `MatchSeq` always has // a nonempty body. @@ -153,7 +153,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint { seq.num_captures } &TtDelimited(_, ref delim) => { - count_names(delim.tts.as_slice()) + count_names(delim.tts[]) } &TtToken(_, MatchNt(..)) => { 1 @@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint { pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: BytePos) -> Box { - let match_idx_hi = count_names(ms.as_slice()); + let match_idx_hi = count_names(ms[]); let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new()); box MatcherPos { stack: vec![], @@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) p_s.span_diagnostic .span_fatal(sp, format!("duplicated bind name: {}", - string.get()).as_slice()) + string.get())[]) } } } @@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess, rdr: TtReader, ms: Vec ) -> HashMap> { - match parse(sess, cfg, rdr, ms.as_slice()) { + match parse(sess, cfg, rdr, ms[]) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.as_slice()) + sess.span_diagnostic.span_fatal(sp, str[]) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.as_slice()) + sess.span_diagnostic.span_fatal(sp, str[]) } } } @@ -416,7 +416,7 @@ pub fn parse(sess: &ParseSess, } } TtToken(sp, SubstNt(..)) => { - return Error(sp, "Cannot transcribe in macro LHS".into_string()) + return Error(sp, "Cannot transcribe in macro LHS".to_string()) } seq @ TtDelimited(..) 
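Both `mtwt.rs` and `macro_parser.rs` above only change their imports: the `Occupied` and `Vacant` variants now live under `std::collections::hash_map::Entry` rather than at the module root. A minimal usage sketch of that `Entry` API, using a hypothetical word-count map rather than the interner tables touched here:

```rust
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::HashMap;

// Insert-or-update through the Entry enum the new import path refers to.
fn count(word: &str, counts: &mut HashMap<String, u32>) {
    match counts.entry(word.to_string()) {
        Occupied(mut e) => *e.get_mut() += 1, // key already present
        Vacant(e) => {
            e.insert(1); // first occurrence
        }
    }
}

fn main() {
    let mut counts = HashMap::new();
    for w in ["a", "b", "a"] {
        count(w, &mut counts);
    }
    println!("{:?}", counts);
}
```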
| seq @ TtToken(_, DocComment(..)) => { let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq)); @@ -446,7 +446,7 @@ pub fn parse(sess: &ParseSess, for dv in eof_eis[0].matches.iter_mut() { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, v.as_slice())); + return Success(nameize(sess, ms, v[])); } else if eof_eis.len() > 1u { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { @@ -521,7 +521,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { _ => { let token_str = pprust::token_to_string(&p.token); p.fatal((format!("expected ident, found {}", - token_str.as_slice())).as_slice()) + token_str[]))[]) } }, "path" => { @@ -535,8 +535,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { res } _ => { - p.fatal(format!("unsupported builtin nonterminal parser: {}", - name).as_slice()) + p.fatal(format!("unsupported builtin nonterminal parser: {}", name)[]) } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 92c68b7a9c724..08014dc13383f 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -52,7 +52,7 @@ impl<'a> ParserAnyMacro<'a> { following", token_str); let span = parser.span; - parser.span_err(span, msg.as_slice()); + parser.span_err(span, msg[]); } } } @@ -124,8 +124,8 @@ impl TTMacroExpander for MacroRulesMacroExpander { sp, self.name, arg, - self.lhses.as_slice(), - self.rhses.as_slice()) + self.lhses[], + self.rhses[]) } } @@ -160,7 +160,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => delim.tts.as_slice(), + TtDelimited(_, ref delim) => delim.tts[], _ => cx.span_fatal(sp, "malformed macro lhs") }; // `None` is because we're not interpolating @@ -198,13 +198,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice()) + Error(sp, ref msg) => cx.span_fatal(sp, msg[]) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, best_fail_msg.as_slice()); + cx.span_fatal(best_fail_spot, best_fail_msg[]); } // Note that macro-by-example's input is also matched against a token tree: diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index e2439bad178fe..deed0b78e87e4 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -106,30 +106,6 @@ enum LockstepIterSize { LisContradiction(String), } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Add for LockstepIterSize { - fn add(&self, other: &LockstepIterSize) -> LockstepIterSize { - match *self { - LisUnconstrained => other.clone(), - LisContradiction(_) => self.clone(), - LisConstraint(l_len, l_id) => match *other { - LisUnconstrained => self.clone(), - LisContradiction(_) => other.clone(), - LisConstraint(r_len, _) if l_len == r_len => self.clone(), - LisConstraint(r_len, r_id) => { - let l_n = token::get_ident(l_id); - let r_n = token::get_ident(r_id); - LisContradiction(format!("inconsistent lockstep iteration: \ - '{}' has {} items, but '{}' has {}", - l_n, l_len, r_n, r_len).to_string()) - } - }, - } - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Add for LockstepIterSize { fn add(self, other: LockstepIterSize) -> LockstepIterSize { match self { @@ -247,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } 
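The removed `#[cfg(stage0)]` block above is the old by-reference `Add` impl for `LockstepIterSize`; only the by-value form (`fn add(self, other: LockstepIterSize)`) is kept. A rough standalone analogue of that by-value operator impl, written against the current `std::ops::Add` shape with an explicit `Output` type; `IterSize` is a hypothetical stand-in:

```rust
use std::ops::Add;

// By-value `Add` for a small enum that combines lockstep-size constraints,
// loosely modelled on the impl kept in the diff above.
#[derive(Clone, Debug, PartialEq)]
enum IterSize {
    Unconstrained,
    Constraint(usize),
    Contradiction(String),
}

impl Add for IterSize {
    type Output = IterSize;

    fn add(self, other: IterSize) -> IterSize {
        match (self, other) {
            (IterSize::Unconstrained, o) => o,
            (s @ IterSize::Contradiction(_), _) => s,
            (s, IterSize::Unconstrained) => s,
            (_, o @ IterSize::Contradiction(_)) => o,
            (IterSize::Constraint(l), IterSize::Constraint(r)) if l == r => {
                IterSize::Constraint(l)
            }
            (IterSize::Constraint(l), IterSize::Constraint(r)) => {
                IterSize::Contradiction(format!("{} items vs {} items", l, r))
            }
        }
    }
}

fn main() {
    let combined = IterSize::Constraint(3) + IterSize::Unconstrained + IterSize::Constraint(3);
    assert_eq!(combined, IterSize::Constraint(3));
    println!("{:?}", combined);
}
```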
LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - r.sp_diag.span_fatal(sp.clone(), msg.as_slice()); + r.sp_diag.span_fatal(sp.clone(), msg[]); } LisConstraint(len, _) => { if len == 0 { @@ -304,7 +280,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ format!("variable '{}' is still repeating at this depth", - token::get_ident(ident)).as_slice()); + token::get_ident(ident))[]); } } } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 0e0a87c74f849..d53a4b0e8d1c1 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -133,7 +133,7 @@ impl<'a> Context<'a> { self.span_handler.span_err(span, explain); self.span_handler.span_help(span, format!("add #![feature({})] to the \ crate attributes to enable", - feature).as_slice()); + feature)[]); } } @@ -187,7 +187,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> { } match i.node { ast::ItemForeignMod(ref foreign_module) => { - if attr::contains_name(i.attrs.as_slice(), "link_args") { + if attr::contains_name(i.attrs[], "link_args") { self.gate_feature("link_args", i.span, "the `link_args` attribute is not portable \ across platforms, it is recommended to \ @@ -201,14 +201,14 @@ impl<'a, 'v> Visitor<'v> for Context<'a> { } ast::ItemFn(..) => { - if attr::contains_name(i.attrs.as_slice(), "plugin_registrar") { + if attr::contains_name(i.attrs[], "plugin_registrar") { self.gate_feature("plugin_registrar", i.span, "compiler plugins are experimental and possibly buggy"); } } ast::ItemStruct(..) => { - if attr::contains_name(i.attrs.as_slice(), "simd") { + if attr::contains_name(i.attrs[], "simd") { self.gate_feature("simd", i.span, "SIMD types are experimental and possibly buggy"); } @@ -285,7 +285,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> { } fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { - if attr::contains_name(i.attrs.as_slice(), "linkage") { + if attr::contains_name(i.attrs[], "linkage") { self.gate_feature("linkage", i.span, "the `linkage` attribute is experimental \ and not portable across platforms") diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 10860ee5e01de..86df588386464 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -814,17 +814,24 @@ pub fn noop_fold_where_predicate( fld: &mut T) -> WherePredicate { match pred { - ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{id, - ident, + ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{bounded_ty, bounds, span}) => { ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate { - id: fld.new_id(id), - ident: fld.fold_ident(ident), + bounded_ty: fld.fold_ty(bounded_ty), bounds: bounds.move_map(|x| fld.fold_ty_param_bound(x)), span: fld.new_span(span) }) } + ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{lifetime, + bounds, + span}) => { + ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate { + span: fld.new_span(span), + lifetime: fld.fold_lifetime(lifetime), + bounds: bounds.move_map(|bound| fld.fold_lifetime(bound)) + }) + } ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{id, path, ty, diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 5d5b56d444f8e..d5093c5055c79 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -24,7 +24,7 @@ #![allow(unknown_features)] #![feature(macro_rules, globs, default_type_params, phase, slicing_syntax)] -#![feature(quote, unsafe_destructor, import_shadowing)] +#![feature(quote, 
unsafe_destructor)] #![feature(unboxed_closures)] extern crate arena; @@ -34,6 +34,8 @@ extern crate serialize; extern crate term; extern crate libc; +extern crate "serialize" as rustc_serialize; // used by deriving + pub mod util { pub mod interner; #[cfg(test)] diff --git a/src/libsyntax/owned_slice.rs b/src/libsyntax/owned_slice.rs index 8e418e46921ff..3023c547fb053 100644 --- a/src/libsyntax/owned_slice.rs +++ b/src/libsyntax/owned_slice.rs @@ -45,7 +45,7 @@ impl OwnedSlice { &*self.data } - pub fn move_iter(self) -> vec::MoveItems { + pub fn move_iter(self) -> vec::IntoIter { self.into_vec().into_iter() } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 41fee1556abff..41693d9d47a51 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -92,8 +92,7 @@ impl<'a> ParserAttr for Parser<'a> { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `#`, found `{}`", - token_str).as_slice()); + self.fatal(format!("expected `#`, found `{}`", token_str)[]); } }; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 95bae63f58f65..b8da8365f7e23 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { while j > i && lines[j - 1].trim().is_empty() { j -= 1; } - return lines.slice(i, j).iter().map(|x| (*x).clone()).collect(); + return lines[i..j].iter().map(|x| (*x).clone()).collect(); } /// remove a "[ \t]*\*" block from each line, if possible @@ -116,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { if can_trim { lines.iter().map(|line| { - line.slice(i + 1, line.len()).to_string() + line[i + 1..line.len()].to_string() }).collect() } else { lines @@ -127,12 +127,12 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; for prefix in ONLINERS.iter() { if comment.starts_with(*prefix) { - return comment.slice_from(prefix.len()).to_string(); + return comment[prefix.len()..].to_string(); } } if comment.starts_with("/*") { - let lines = comment.slice(3u, comment.len() - 2u) + let lines = comment[3u..comment.len() - 2u] .lines_any() .map(|s| s.to_string()) .collect:: >(); @@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. 
- if is_doc_comment(line.as_slice()) { + if is_doc_comment(line[]) { break; } lines.push(line); @@ -224,10 +224,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(s.as_slice(), col) { + let s1 = match all_whitespace(s[], col) { Some(col) => { if col < len { - s.slice(col, len).to_string() + s[col..len].to_string() } else { "".to_string() } @@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(curr_line.as_slice()) { + if is_block_doc_comment(curr_line[]) { return } assert!(!curr_line.contains_char('\n')); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index da908f46ff61f..13d020f6ae31b 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -194,7 +194,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, m.as_slice()); + self.fatal_span_(from_pos, to_pos, m[]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -203,7 +203,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.err_span_(from_pos, to_pos, m.as_slice()); + self.err_span_(from_pos, to_pos, m[]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -212,8 +212,8 @@ impl<'a> StringReader<'a> { m.push_str(": "); let from = self.byte_offset(from_pos).to_uint(); let to = self.byte_offset(to_pos).to_uint(); - m.push_str(self.filemap.src.as_slice().slice(from, to)); - self.fatal_span_(from_pos, to_pos, m.as_slice()); + m.push_str(self.filemap.src[from..to]); + self.fatal_span_(from_pos, to_pos, m[]); } /// Advance peek_tok and peek_span to refer to the next token, and @@ -299,7 +299,7 @@ impl<'a> StringReader<'a> { while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); if ch == '\r' { - if j < i { buf.push_str(s.slice(j, i)); } + if j < i { buf.push_str(s[j..i]); } j = next; if next >= s.len() || s.char_at(next) != '\n' { let pos = start + BytePos(i as u32); @@ -309,7 +309,7 @@ impl<'a> StringReader<'a> { } i = next; } - if j < s.len() { buf.push_str(s.slice_from(j)); } + if j < s.len() { buf.push_str(s[j..]); } buf } } @@ -358,7 +358,7 @@ impl<'a> StringReader<'a> { pub fn nextnextch(&self) -> Option { let offset = self.byte_offset(self.pos).to_uint(); - let s = self.filemap.deref().src.as_slice(); + let s = self.filemap.deref().src[]; if offset >= s.len() { return None } let str::CharRange { next, .. } = s.char_range_at(offset); if next < s.len() { @@ -554,7 +554,7 @@ impl<'a> StringReader<'a> { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_cow() }; - token::DocComment(token::intern(string.as_slice())) + token::DocComment(token::intern(string[])) } else { token::Comment }; @@ -1108,7 +1108,7 @@ impl<'a> StringReader<'a> { // expansion purposes. See #12512 for the gory details of why // this is necessary. 
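Most of the edits in `comments.rs` and `lexer/mod.rs` above replace `.slice(a, b)`, `.slice_from(a)`, and `.as_slice()` with the slicing-syntax forms `s[a..b]`, `s[a..]`, and `s[]`. A small sketch of the same operations; note the bare full-range form `s[]` was specific to this era, and is written `&s[..]` in current Rust:

```rust
// Range slicing of a String, as this diff migrates to.
fn main() {
    let comment = String::from("/// doc comment");
    let prefix = "///";

    // Old: comment.slice_from(prefix.len())  ->  diff: comment[prefix.len()..]
    let body: &str = &comment[prefix.len()..];

    // Old: comment.slice(0, 3)  ->  diff: comment[0..3]
    let marker: &str = &comment[0..3];

    // Old: comment.as_slice()  ->  diff: comment[]  (today: &comment[..])
    let whole: &str = &comment[..];

    assert_eq!(marker, "///");
    assert_eq!(body.trim(), "doc comment");
    assert_eq!(whole, "/// doc comment");
    println!("{} | {}", marker, body.trim());
}
```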
let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(format!("'{}", lifetime_name).as_slice()) + str_to_ident(format!("'{}", lifetime_name)[]) }); // Conjure up a "keyword checking ident" to make sure that diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index d6f5d0e248a86..8cefb111fd1fc 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -251,17 +251,17 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) Err(e) => { err(format!("couldn't read {}: {}", path.display(), - e).as_slice()); + e)[]); unreachable!() } }; - match str::from_utf8(bytes.as_slice()) { + match str::from_utf8(bytes[]).ok() { Some(s) => { return string_to_filemap(sess, s.to_string(), path.as_str().unwrap().to_string()) } None => { - err(format!("{} is not UTF-8 encoded", path.display()).as_slice()) + err(format!("{} is not UTF-8 encoded", path.display())[]) } } unreachable!() @@ -391,10 +391,10 @@ pub fn char_lit(lit: &str) -> (char, int) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = msg.as_slice(); + let msg2 = msg[]; fn esc(len: uint, lit: &str) -> Option<(char, int)> { - num::from_str_radix(lit.slice(2, len), 16) + num::from_str_radix(lit[2..len], 16) .and_then(char::from_u32) .map(|x| (x, len as int)) } @@ -402,10 +402,10 @@ pub fn char_lit(lit: &str) -> (char, int) { let unicode_escape: || -> Option<(char, int)> = || if lit.as_bytes()[2] == b'{' { let idx = lit.find('}').expect(msg2); - let subslice = lit.slice(3, idx); + let subslice = lit[3..idx]; num::from_str_radix(subslice, 16) .and_then(char::from_u32) - .map(|x| (x, subslice.char_len() as int + 4)) + .map(|x| (x, subslice.chars().count() as int + 4)) } else { esc(6, lit) }; @@ -429,7 +429,7 @@ pub fn str_lit(lit: &str) -> String { let error = |i| format!("lexer should have rejected {} at {}", lit, i); /// Eat everything up to a non-whitespace - fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharOffsets<'a>>) { + fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) { loop { match it.peek().map(|x| x.1) { Some(' ') | Some('\n') | Some('\r') | Some('\t') => { @@ -464,7 +464,7 @@ pub fn str_lit(lit: &str) -> String { eat(&mut chars); } else { // otherwise, a normal escape - let (c, n) = char_lit(lit.slice_from(i)); + let (c, n) = char_lit(lit[i..]); for _ in range(0, n - 1) { // we don't need to move past the first \ chars.next(); } @@ -527,7 +527,7 @@ pub fn raw_str_lit(lit: &str) -> String { fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s.len() > 1 && first_chars.contains(&s.char_at(0)) && - s.slice_from(1).chars().all(|c| '0' <= c && c <= '9') + s[1..].chars().all(|c| '0' <= c && c <= '9') } fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, @@ -540,7 +540,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it looks like a width, lets try to be helpful. 
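`char_lit` above decodes escapes by slicing out the hex digits and running them through `from_str_radix` and `char::from_u32`. A simplified sketch of just the `\u{...}` case; `decode_unicode_escape` is a hypothetical helper, and the real function also handles `\x`-style escapes, byte offsets, and error reporting:

```rust
// Decode a `\u{...}` escape: the hex digits between the braces, converted
// through u32::from_str_radix and char::from_u32.
fn decode_unicode_escape(lit: &str) -> Option<char> {
    let hex = lit.strip_prefix("\\u{")?.strip_suffix('}')?;
    u32::from_str_radix(hex, 16).ok().and_then(char::from_u32)
}

fn main() {
    assert_eq!(decode_unicode_escape("\\u{61}"), Some('a'));
    assert_eq!(decode_unicode_escape("\\u{110000}"), None); // out of Unicode range
    println!("{:?}", decode_unicode_escape("\\u{2764}"));
}
```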
sd.span_err(sp, &*format!("illegal width `{}` for float literal, \ - valid widths are 32 and 64", suf.slice_from(1))); + valid widths are 32 and 64", suf[1..])); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \ valid suffixes are `f32` and `f64`", suf)); @@ -576,7 +576,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) { b'\'' => b'\'', b'0' => b'\0', _ => { - match ::std::num::from_str_radix::(lit.slice(2, 4), 16) { + match ::std::num::from_str_radix::(lit[2..4], 16) { Some(c) => if c > 0xFF { panic!(err(2)) @@ -626,7 +626,7 @@ pub fn binary_lit(lit: &str) -> Rc> { } _ => { // otherwise, a normal escape - let (c, n) = byte_lit(lit.slice_from(i)); + let (c, n) = byte_lit(lit[i..]); // we don't need to move past the first \ for _ in range(0, n - 1) { chars.next(); @@ -655,7 +655,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); - let mut s = s2.as_slice(); + let mut s = s2[]; debug!("integer_lit: {}, {}", s, suffix); @@ -688,7 +688,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> } if base != 10 { - s = s.slice_from(2); + s = s[2..]; } if let Some(suf) = suffix { @@ -710,7 +710,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> if looks_like_width_suffix(&['i', 'u'], suf) { sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \ valid widths are 8, 16, 32 and 64", - suf.slice_from(1))); + suf[1..])); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf)); } @@ -745,8 +745,7 @@ mod test { use owned_slice::OwnedSlice; use ast; use abi; - use attr; - use attr::AttrMetaMethods; + use attr::{first_attr_value_str_by_name, AttrMetaMethods}; use parse::parser::Parser; use parse::token::{str_to_ident}; use print::pprust::view_item_to_string; @@ -809,7 +808,7 @@ mod test { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = tts.as_slice(); + let tts: &[ast::TokenTree] = tts[]; match tts { [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), ast::TtToken(_, token::Not), @@ -817,19 +816,19 @@ mod test { ast::TtDelimited(_, ref macro_delimed)] if name_macro_rules.as_str() == "macro_rules" && name_zip.as_str() == "zip" => { - match macro_delimed.tts.as_slice() { + match macro_delimed.tts[] { [ast::TtDelimited(_, ref first_delimed), ast::TtToken(_, token::FatArrow), ast::TtDelimited(_, ref second_delimed)] if macro_delimed.delim == token::Paren => { - match first_delimed.tts.as_slice() { + match first_delimed.tts[] { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if first_delimed.delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 3: {}", **first_delimed), } - match second_delimed.tts.as_slice() { + match second_delimed.tts[] { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if second_delimed.delim == token::Paren @@ -1107,24 +1106,24 @@ mod test { let use_s = "use foo::bar::baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.as_slice(), use_s); + assert_eq!(vitem_s[], use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.as_slice(), use_s); + assert_eq!(vitem_s[], use_s); } #[test] fn parse_extern_crate() { let ex_s = "extern crate foo;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.as_slice(), ex_s); + assert_eq!(vitem_s[], ex_s); let ex_s = "extern crate \"foo\" as bar;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s.as_slice(), ex_s); + assert_eq!(vitem_s[], ex_s); } fn get_spans_of_pat_idents(src: &str) -> Vec { @@ -1162,9 +1161,9 @@ mod test { for &src in srcs.iter() { let spans = get_spans_of_pat_idents(src); let Span{lo:lo,hi:hi,..} = spans[0]; - assert!("self" == src.slice(lo.to_uint(), hi.to_uint()), + assert!("self" == src[lo.to_uint()..hi.to_uint()], "\"{}\" != \"self\". 
src=\"{}\"", - src.slice(lo.to_uint(), hi.to_uint()), src) + src[lo.to_uint()..hi.to_uint()], src) } } @@ -1195,7 +1194,7 @@ mod test { let name = "".to_string(); let source = "/// doc comment\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap(); - let doc = attr::first_attr_value_str_by_name(item.attrs.as_slice(), "doc").unwrap(); + let doc = first_attr_value_str_by_name(item.attrs.as_slice(), "doc").unwrap(); assert_eq!(doc.get(), "/// doc comment"); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); @@ -1203,11 +1202,11 @@ mod test { let docs = item.attrs.iter().filter(|a| a.name().get() == "doc") .map(|a| a.value_str().unwrap().get().to_string()).collect::>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(docs.as_slice(), b); + assert_eq!(docs[], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); - let doc = attr::first_attr_value_str_by_name(item.attrs.as_slice(), "doc").unwrap(); + let doc = first_attr_value_str_by_name(item.attrs.as_slice(), "doc").unwrap(); assert_eq!(doc.get(), "/** doc comment\n * with CRLF */"); } } diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index a6ddcbf9ac41e..e3c831c09bac5 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -113,13 +113,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { kind_str: &str, desc: &str) { self.span_err(sp, - format!("obsolete syntax: {}", kind_str).as_slice()); + format!("obsolete syntax: {}", kind_str)[]); if !self.obsolete_set.contains(&kind) { self.sess .span_diagnostic .handler() - .note(format!("{}", desc).as_slice()); + .note(format!("{}", desc)[]); self.obsolete_set.insert(kind); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index db195c0f206c1..197970317d245 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -41,7 +41,7 @@ use ast::{LifetimeDef, Lit, Lit_}; use ast::{LitBool, LitChar, LitByte, LitBinary}; use ast::{LitStr, LitInt, Local, LocalLet}; use ast::{MacStmtWithBraces, MacStmtWithSemicolon, MacStmtWithoutBraces}; -use ast::{MutImmutable, MutMutable, Mac_, MacInvocTT, MatchNormal}; +use ast::{MutImmutable, MutMutable, Mac_, MacInvocTT, MatchSource}; use ast::{Method, MutTy, BiMul, Mutability}; use ast::{MethodImplItem, NamedField, UnNeg, NoReturn, NodeId, UnNot}; use ast::{Pat, PatEnum, PatIdent, PatLit, PatRange, PatRegion, PatStruct}; @@ -319,7 +319,7 @@ impl TokenType { fn to_string(&self) -> String { match *self { TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)), - TokenType::Operator => "an operator".into_string(), + TokenType::Operator => "an operator".to_string(), } } } @@ -384,12 +384,12 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; self.span_fatal(last_span, format!("unexpected token: `{}`", - token_str).as_slice()); + token_str)[]); } pub fn unexpected(&mut self) -> ! { let this_token = self.this_token_to_string(); - self.fatal(format!("unexpected token: `{}`", this_token).as_slice()); + self.fatal(format!("unexpected token: `{}`", this_token)[]); } /// Expect and consume the token t. 
Signal an error if @@ -403,7 +403,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", token_str, - this_token_str).as_slice()) + this_token_str)[]) } } else { self.expect_one_of(slice::ref_slice(t), &[]); @@ -420,7 +420,7 @@ impl<'a> Parser<'a> { let mut i = tokens.iter(); // This might be a sign we need a connect method on Iterator. let b = i.next() - .map_or("".into_string(), |t| t.to_string()); + .map_or("".to_string(), |t| t.to_string()); i.enumerate().fold(b, |mut b, (i, ref a)| { if tokens.len() > 2 && i == tokens.len() - 2 { b.push_str(", or "); @@ -444,7 +444,7 @@ impl<'a> Parser<'a> { expected.push_all(&*self.expected_tokens); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); - let expect = tokens_to_string(expected.as_slice()); + let expect = tokens_to_string(expected[]); let actual = self.this_token_to_string(); self.fatal( (if expected.len() != 1 { @@ -455,7 +455,7 @@ impl<'a> Parser<'a> { (format!("expected {}, found `{}`", expect, actual)) - }).as_slice() + })[] ) } } @@ -488,7 +488,7 @@ impl<'a> Parser<'a> { // might be unit-struct construction; check for recoverableinput error. let mut expected = edible.iter().map(|x| x.clone()).collect::>(); expected.push_all(inedible); - self.check_for_erroneous_unit_struct_expecting(expected.as_slice()); + self.check_for_erroneous_unit_struct_expecting(expected[]); } self.expect_one_of(edible, inedible) } @@ -505,9 +505,9 @@ impl<'a> Parser<'a> { .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { let mut expected = edible.iter().map(|x| x.clone()).collect::>(); - expected.push_all(inedible.as_slice()); + expected.push_all(inedible[]); self.check_for_erroneous_unit_struct_expecting( - expected.as_slice()); + expected[]); } self.expect_one_of(edible, inedible) } @@ -530,7 +530,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal((format!("expected ident, found `{}`", - token_str)).as_slice()) + token_str))[]) } } } @@ -584,7 +584,7 @@ impl<'a> Parser<'a> { let id_interned_str = token::get_name(kw.to_name()); let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", - id_interned_str, token_str).as_slice()) + id_interned_str, token_str)[]) } } @@ -595,7 +595,7 @@ impl<'a> Parser<'a> { let span = self.span; self.span_err(span, format!("expected identifier, found keyword `{}`", - token_str).as_slice()); + token_str)[]); } } @@ -604,7 +604,7 @@ impl<'a> Parser<'a> { if self.token.is_reserved_keyword() { let token_str = self.this_token_to_string(); self.fatal(format!("`{}` is a reserved keyword", - token_str).as_slice()) + token_str)[]) } } @@ -624,7 +624,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BinOp(token::And)); self.fatal(format!("expected `{}`, found `{}`", found_token, - token_str).as_slice()) + token_str)[]) } } } @@ -645,7 +645,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BinOp(token::Or)); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token).as_slice()) + found_token)[]) } } } @@ -711,7 +711,7 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(&token::Lt); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token).as_slice()) + found_token)[]) } } @@ -763,7 +763,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", gt_str, - this_token_str).as_slice()) + this_token_str)[]) } } } @@ -1392,7 +1392,7 @@ 
impl<'a> Parser<'a> { let (inner_attrs, body) = p.parse_inner_attrs_and_block(); let mut attrs = attrs; - attrs.push_all(inner_attrs.as_slice()); + attrs.push_all(inner_attrs[]); ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, @@ -1411,7 +1411,7 @@ impl<'a> Parser<'a> { _ => { let token_str = p.this_token_to_string(); p.fatal((format!("expected `;` or `{{`, found `{}`", - token_str)).as_slice()) + token_str))[]) } } } @@ -1497,9 +1497,6 @@ impl<'a> Parser<'a> { } /// Parse a type. - /// - /// The second parameter specifies whether the `+` binary operator is - /// allowed in the type grammar. pub fn parse_ty(&mut self) -> P { maybe_whole!(no_clone self, NtTy); @@ -1548,7 +1545,7 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Bracket)); let t = self.parse_ty_sum(); - // Parse the `, ..e` in `[ int, ..e ]` + // Parse the `; e` in `[ int; e ]` // where `e` is a const expression let t = match self.maybe_parse_fixed_vstore() { None => TyVec(t), @@ -1609,7 +1606,7 @@ impl<'a> Parser<'a> { } else { let this_token_str = self.this_token_to_string(); let msg = format!("expected type, found `{}`", this_token_str); - self.fatal(msg.as_slice()); + self.fatal(msg[]); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1716,6 +1713,9 @@ impl<'a> Parser<'a> { self.bump(); self.bump(); Some(self.parse_expr()) + } else if self.check(&token::Semi) { + self.bump(); + Some(self.parse_expr()) } else { None } @@ -1753,14 +1753,14 @@ impl<'a> Parser<'a> { token::Str_(s) => { (true, - LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()), + LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str())[]), ast::CookedStr)) } token::StrRaw(s, n) => { (true, LitStr( token::intern_and_get_ident( - parse::raw_str_lit(s.as_str()).as_slice()), + parse::raw_str_lit(s.as_str())[]), ast::RawStr(n))) } token::Binary(i) => @@ -2004,7 +2004,7 @@ impl<'a> Parser<'a> { }; } _ => { - self.fatal(format!("expected a lifetime name").as_slice()); + self.fatal(format!("expected a lifetime name")[]); } } } @@ -2042,7 +2042,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `,` or `>` after lifetime \ name, found `{}`", this_token_str); - self.fatal(msg.as_slice()); + self.fatal(msg[]); } } } @@ -2262,6 +2262,12 @@ impl<'a> Parser<'a> { let count = self.parse_expr(); self.expect(&token::CloseDelim(token::Bracket)); ex = ExprRepeat(first_expr, count); + } else if self.check(&token::Semi) { + // Repeating vector syntax: [ 0; 512 ] + self.bump(); + let count = self.parse_expr(); + self.expect(&token::CloseDelim(token::Bracket)); + ex = ExprRepeat(first_expr, count); } else if self.check(&token::Comma) { // Vector with two or more elements. 
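The parser additions above accept the newer fixed-size array forms: `[T; N]` in types (the `; e` in `[ int; e ]`) and the repeat expression `[value; count]` (the `[ 0; 512 ]` case in the comment), alongside the older `[T, ..N]` spelling. A short usage sketch:

```rust
// Usage of the fixed-size array forms the parser changes above accept.
fn main() {
    // Repeat expression: 512 zero bytes.
    let buf = [0u8; 512];

    // The element count is part of the type.
    let header: [u8; 4] = [b'R', b'U', b'S', b'T'];

    assert_eq!(buf.len(), 512);
    assert_eq!(&header, b"RUST");
    println!("{} bytes, header {:?}", buf.len(), header);
}
```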
self.bump(); @@ -2511,7 +2517,7 @@ impl<'a> Parser<'a> { hi = self.span.hi; self.bump(); - let index = from_str::(n.as_str()); + let index = n.as_str().parse::(); match index { Some(n) => { let id = spanned(dot, hi, n); @@ -2529,16 +2535,16 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let fstr = n.as_str(); self.span_err(last_span, - format!("unexpected token: `{}`", n.as_str()).as_slice()); + format!("unexpected token: `{}`", n.as_str())[]); if fstr.chars().all(|x| "0123456789.".contains_char(x)) { - let float = match from_str::(fstr) { + let float = match fstr.parse::() { Some(f) => f, None => continue, }; self.span_help(last_span, format!("try parenthesizing the first index; e.g., `(foo.{}){}`", float.trunc() as uint, - float.fract().to_string()[1..]).as_slice()); + float.fract().to_string()[1..])[]); } self.abort_if_errors(); @@ -2710,7 +2716,7 @@ impl<'a> Parser<'a> { }; let token_str = p.this_token_to_string(); p.fatal(format!("incorrect close delimiter: `{}`", - token_str).as_slice()) + token_str)[]) }, /* we ought to allow different depths of unquotation */ token::Dollar if p.quote_depth > 0u => { @@ -2728,7 +2734,7 @@ impl<'a> Parser<'a> { let seq = match seq { Spanned { node, .. } => node, }; - let name_num = macro_parser::count_names(seq.as_slice()); + let name_num = macro_parser::count_names(seq[]); TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(SequenceRepetition { tts: seq, @@ -2879,7 +2885,7 @@ impl<'a> Parser<'a> { let this_token_to_string = self.this_token_to_string(); self.span_err(span, format!("expected expression, found `{}`", - this_token_to_string).as_slice()); + this_token_to_string)[]); let box_span = mk_sp(lo, self.last_span.hi); self.span_help(box_span, "perhaps you meant `box() (foo)` instead?"); @@ -3114,7 +3120,7 @@ impl<'a> Parser<'a> { } let hi = self.span.hi; self.bump(); - return self.mk_expr(lo, hi, ExprMatch(discriminant, arms, MatchNormal)); + return self.mk_expr(lo, hi, ExprMatch(discriminant, arms, MatchSource::Normal)); } pub fn parse_arm(&mut self) -> Arm { @@ -3258,7 +3264,7 @@ impl<'a> Parser<'a> { if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", "}", - token_str).as_slice()) + token_str)[]) } etc = true; break; @@ -3279,7 +3285,7 @@ impl<'a> Parser<'a> { BindByRef(..) | BindByValue(MutMutable) => { let token_str = self.this_token_to_string(); self.fatal(format!("unexpected `{}`", - token_str).as_slice()) + token_str)[]) } _ => {} } @@ -3557,7 +3563,7 @@ impl<'a> Parser<'a> { let span = self.span; let tok_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected identifier, found `{}`", tok_str).as_slice()); + format!("expected identifier, found `{}`", tok_str)[]); } let ident = self.parse_ident(); let last_span = self.last_span; @@ -3658,7 +3664,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; if self.token.is_keyword(keywords::Let) { - check_expected_item(self, item_attrs.as_slice()); + check_expected_item(self, item_attrs[]); self.expect_keyword(keywords::Let); let decl = self.parse_let(); P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) @@ -3667,7 +3673,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| *t == token::Not) { // it's a macro invocation: - check_expected_item(self, item_attrs.as_slice()); + check_expected_item(self, item_attrs[]); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... 
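The tuple-index handling above also switches from the free function `from_str::<uint>(..)` to the method form `n.as_str().parse::<uint>()`. A small sketch with current `str::parse`, which returns a `Result` (hence the `.ok()`); `field_index` is a hypothetical helper, not parser code:

```rust
// Parse a tuple-field index such as the `0` in `expr.0`.
fn field_index(token: &str) -> Option<usize> {
    token.parse::<usize>().ok()
}

fn main() {
    assert_eq!(field_index("0"), Some(0));
    assert_eq!(field_index("12"), Some(12));
    assert_eq!(field_index("1e2"), None); // not a plain integer index
    println!("{:?}", field_index("3"));
}
```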
@@ -3695,7 +3701,7 @@ impl<'a> Parser<'a> { let tok_str = self.this_token_to_string(); self.fatal(format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str).as_slice()) + tok_str)[]) }, }; @@ -3743,7 +3749,7 @@ impl<'a> Parser<'a> { } } else { let found_attrs = !item_attrs.is_empty(); - let item_err = Parser::expected_item_err(item_attrs.as_slice()); + let item_err = Parser::expected_item_err(item_attrs[]); match self.parse_item_or_view_item(item_attrs, false) { IoviItem(i) => { let hi = i.span.hi; @@ -3787,7 +3793,7 @@ impl<'a> Parser<'a> { let sp = self.span; let tok = self.this_token_to_string(); self.span_fatal_help(sp, - format!("expected `{{`, found `{}`", tok).as_slice(), + format!("expected `{{`, found `{}`", tok)[], "place this code inside a block"); } @@ -3841,13 +3847,13 @@ impl<'a> Parser<'a> { while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. - attributes_box.push_all(self.parse_outer_attributes().as_slice()); + attributes_box.push_all(self.parse_outer_attributes()[]); match self.token { token::Semi => { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box.as_slice())); + Parser::expected_item_err(attributes_box[])); attributes_box = Vec::new(); } self.bump(); // empty @@ -3938,7 +3944,7 @@ impl<'a> Parser<'a> { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box.as_slice())); + Parser::expected_item_err(attributes_box[])); } let hi = self.span.hi; @@ -4179,6 +4185,10 @@ impl<'a> Parser<'a> { } /// Parses an optional `where` clause and places it in `generics`. + /// + /// ``` + /// where T : Trait + 'b, 'a : 'b + /// ``` fn parse_where_clause(&mut self, generics: &mut ast::Generics) { if !self.eat_keyword(keywords::Where) { return @@ -4187,58 +4197,79 @@ impl<'a> Parser<'a> { let mut parsed_something = false; loop { let lo = self.span.lo; - let path = match self.token { - token::Ident(..) => self.parse_path(NoTypesAllowed), - _ => break, - }; + match self.token { + token::OpenDelim(token::Brace) => { + break + } - if self.eat(&token::Colon) { - let bounds = self.parse_ty_param_bounds(); - let hi = self.span.hi; - let span = mk_sp(lo, hi); + token::Lifetime(..) 
=> { + let bounded_lifetime = + self.parse_lifetime(); - if bounds.len() == 0 { - self.span_err(span, - "each predicate in a `where` clause must have \ - at least one bound in it"); + self.eat(&token::Colon); + + let bounds = + self.parse_lifetimes(token::BinOp(token::Plus)); + + let hi = self.span.hi; + let span = mk_sp(lo, hi); + + generics.where_clause.predicates.push(ast::WherePredicate::RegionPredicate( + ast::WhereRegionPredicate { + span: span, + lifetime: bounded_lifetime, + bounds: bounds + } + )); + + parsed_something = true; } - let ident = match ast_util::path_to_ident(&path) { - Some(ident) => ident, - None => { - self.span_err(path.span, "expected a single identifier \ - in bound where clause"); - break; - } - }; + _ => { + let bounded_ty = self.parse_ty(); - generics.where_clause.predicates.push( - ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate { - id: ast::DUMMY_NODE_ID, - span: span, - ident: ident, - bounds: bounds, - })); - parsed_something = true; - } else if self.eat(&token::Eq) { - let ty = self.parse_ty(); - let hi = self.span.hi; - let span = mk_sp(lo, hi); - generics.where_clause.predicates.push( - ast::WherePredicate::EqPredicate(ast::WhereEqPredicate { - id: ast::DUMMY_NODE_ID, - span: span, - path: path, - ty: ty, - })); - parsed_something = true; - // FIXME(#18433) - self.span_err(span, "equality constraints are not yet supported in where clauses"); - } else { - let last_span = self.last_span; - self.span_err(last_span, + if self.eat(&token::Colon) { + let bounds = self.parse_ty_param_bounds(); + let hi = self.span.hi; + let span = mk_sp(lo, hi); + + if bounds.len() == 0 { + self.span_err(span, + "each predicate in a `where` clause must have \ + at least one bound in it"); + } + + generics.where_clause.predicates.push(ast::WherePredicate::BoundPredicate( + ast::WhereBoundPredicate { + span: span, + bounded_ty: bounded_ty, + bounds: bounds, + })); + + parsed_something = true; + } else if self.eat(&token::Eq) { + // let ty = self.parse_ty(); + let hi = self.span.hi; + let span = mk_sp(lo, hi); + // generics.where_clause.predicates.push( + // ast::WherePredicate::EqPredicate(ast::WhereEqPredicate { + // id: ast::DUMMY_NODE_ID, + // span: span, + // path: panic!("NYI"), //bounded_ty, + // ty: ty, + // })); + // parsed_something = true; + // // FIXME(#18433) + self.span_err(span, + "equality constraints are not yet supported \ + in where clauses (#20041)"); + } else { + let last_span = self.last_span; + self.span_err(last_span, "unexpected token in `where` clause"); - } + } + } + }; if !self.eat(&token::Comma) { break @@ -4331,7 +4362,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `self`, found `{}`", - token_str).as_slice()) + token_str)[]) } } } @@ -4485,7 +4516,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `,` or `)`, found `{}`", - token_str).as_slice()) + token_str)[]) } } } @@ -4661,7 +4692,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let body_span = body.span; let mut new_attrs = attrs; - new_attrs.push_all(inner_attrs.as_slice()); + new_attrs.push_all(inner_attrs[]); (ast::MethDecl(ident, generics, abi, @@ -4818,7 +4849,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name)).as_slice()); + token::get_ident(class_name))[]); } self.bump(); } else if 
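The `parse_where_clause` rework above matches the doc-comment example `where T : Trait + 'b, 'a : 'b`: a predicate may now bound an arbitrary type rather than a single identifier, and a lifetime may be bounded by other lifetimes (emitted as a `WhereRegionPredicate`). A usage sketch that exercises both predicate kinds; `longest_label` is an illustrative function, not from the diff:

```rust
use std::fmt::Display;

// A `where` clause with a type predicate (trait + lifetime bound) and a
// region predicate (`'a: 'b`), the two forms the reworked parser accepts.
fn longest_label<'a, 'b, T>(items: &'a [T], fallback: &'b str) -> String
where
    T: Display + 'a, // type predicate with trait and lifetime bounds
    'a: 'b,          // region predicate: 'a outlives 'b
{
    items
        .iter()
        .map(|t| t.to_string())
        .max_by_key(|s| s.len())
        .unwrap_or_else(|| fallback.to_string())
}

fn main() {
    let words = ["syntax", "expansion"];
    println!("{}", longest_label(&words, "none"));
}
```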
self.check(&token::OpenDelim(token::Paren)) { @@ -4842,7 +4873,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name)).as_slice()); + token::get_ident(class_name))[]); } self.expect(&token::Semi); } else if self.eat(&token::Semi) { @@ -4853,7 +4884,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, `(`, or `;` after struct \ name, found `{}`", "{", - token_str).as_slice()) + token_str)[]) } let _ = ast::DUMMY_NODE_ID; // FIXME: Workaround for crazy bug. @@ -4882,7 +4913,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal_help(span, format!("expected `,`, or `}}`, found `{}`", - token_str).as_slice(), + token_str)[], "struct fields should be separated by commas") } } @@ -4952,7 +4983,7 @@ impl<'a> Parser<'a> { let mut attrs = self.parse_outer_attributes(); if first { let mut tmp = attrs_remaining.clone(); - tmp.push_all(attrs.as_slice()); + tmp.push_all(attrs[]); attrs = tmp; first = false; } @@ -4969,7 +5000,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected item, found `{}`", - token_str).as_slice()) + token_str)[]) } } } @@ -4978,7 +5009,7 @@ impl<'a> Parser<'a> { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining.as_slice())); + Parser::expected_item_err(attrs_remaining[])); } ast::Mod { @@ -5048,7 +5079,7 @@ impl<'a> Parser<'a> { -> (ast::Item_, Vec ) { let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span)); prefix.pop(); - let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice()); + let mod_path = Path::new(".").join_many(self.mod_path_stack[]); let dir_path = prefix.join(&mod_path); let mod_string = token::get_ident(id); let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name( @@ -5058,8 +5089,8 @@ impl<'a> Parser<'a> { let mod_name = mod_string.get().to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(default_path_str.as_slice()); - let secondary_path = dir_path.join(secondary_path_str.as_slice()); + let default_path = dir_path.join(default_path_str[]); + let secondary_path = dir_path.join(secondary_path_str[]); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -5074,13 +5105,13 @@ impl<'a> Parser<'a> { format!("maybe move this module `{0}` \ to its own directory via \ `{0}/mod.rs`", - this_module).as_slice()); + this_module)[]); if default_exists || secondary_exists { self.span_note(id_sp, format!("... 
or maybe `use` the module \ `{}` instead of possibly \ redeclaring it", - mod_name).as_slice()); + mod_name)[]); } self.abort_if_errors(); } @@ -5091,12 +5122,12 @@ impl<'a> Parser<'a> { (false, false) => { self.span_fatal_help(id_sp, format!("file not found for module `{}`", - mod_name).as_slice(), + mod_name)[], format!("name the file either {} or {} inside \ the directory {}", default_path_str, secondary_path_str, - dir_path.display()).as_slice()); + dir_path.display())[]); } (true, true) => { self.span_fatal_help( @@ -5105,7 +5136,7 @@ impl<'a> Parser<'a> { and {}", mod_name, default_path_str, - secondary_path_str).as_slice(), + secondary_path_str)[], "delete or rename one of them to remove the ambiguity"); } } @@ -5127,11 +5158,11 @@ impl<'a> Parser<'a> { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in included_mod_stack.slice(i, len).iter() { - err.push_str(p.display().as_cow().as_slice()); + err.push_str(p.display().as_cow()[]); err.push_str(" -> "); } - err.push_str(path.display().as_cow().as_slice()); - self.span_fatal(id_sp, err.as_slice()); + err.push_str(path.display().as_cow()[]); + self.span_fatal(id_sp, err[]); } None => () } @@ -5212,7 +5243,7 @@ impl<'a> Parser<'a> { if !attrs_remaining.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining.as_slice())); + Parser::expected_item_err(attrs_remaining[])); } assert!(self.token == token::CloseDelim(token::Brace)); ast::ForeignMod { @@ -5253,7 +5284,7 @@ impl<'a> Parser<'a> { self.span_help(span, format!("perhaps you meant to enclose the crate name `{}` in \ a string?", - the_ident.as_str()).as_slice()); + the_ident.as_str())[]); None } else { None @@ -5279,7 +5310,7 @@ impl<'a> Parser<'a> { self.span_fatal(span, format!("expected extern crate name but \ found `{}`", - token_str).as_slice()); + token_str)[]); } }; @@ -5377,7 +5408,7 @@ impl<'a> Parser<'a> { self.span_err(start_span, format!("unit-like struct variant should be written \ without braces, as `{},`", - token::get_ident(ident)).as_slice()); + token::get_ident(ident))[]); } kind = StructVariantKind(struct_def); } else if self.check(&token::OpenDelim(token::Paren)) { @@ -5462,7 +5493,7 @@ impl<'a> Parser<'a> { format!("illegal ABI: expected one of [{}], \ found `{}`", abi::all_names().connect(", "), - the_string).as_slice()); + the_string)[]); None } } @@ -5524,7 +5555,7 @@ impl<'a> Parser<'a> { format!("`extern mod` is obsolete, use \ `extern crate` instead \ to refer to external \ - crates.").as_slice()) + crates.")[]) } return self.parse_item_extern_crate(lo, visibility, attrs); } @@ -5552,7 +5583,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal(span, format!("expected `{}` or `fn`, found `{}`", "{", - token_str).as_slice()); + token_str)[]); } if self.eat_keyword(keywords::Virtual) { @@ -5665,7 +5696,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = - self.parse_item_mod(attrs.as_slice()); + self.parse_item_mod(attrs[]); let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, @@ -6000,7 +6031,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes().as_slice()); + attrs.push_all(self.parse_outer_attributes()[]); // First, parse view items. 
let mut view_items : Vec = Vec::new(); let mut items = Vec::new(); @@ -6082,7 +6113,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes().as_slice()); + attrs.push_all(self.parse_outer_attributes()[]); let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs, macros_allowed) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index dad369792d7a1..f575d3d6c676b 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -28,7 +28,7 @@ use std::path::BytesContainer; use std::rc::Rc; #[allow(non_camel_case_types)] -#[deriving(Clone, Copy, Encodable, Decodable, PartialEq, Eq, Hash, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Show, Copy)] pub enum BinOpToken { Plus, Minus, @@ -43,7 +43,7 @@ pub enum BinOpToken { } /// A delimeter token -#[deriving(Clone, Copy, Encodable, Decodable, PartialEq, Eq, Hash, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Show, Copy)] pub enum DelimToken { /// A round parenthesis: `(` or `)` Paren, @@ -53,14 +53,14 @@ pub enum DelimToken { Brace, } -#[deriving(Clone, Copy, Encodable, Decodable, PartialEq, Eq, Hash, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Show, Copy)] pub enum IdentStyle { /// `::` follows the identifier with no whitespace in-between. ModName, Plain, } -#[deriving(Clone, Copy, Encodable, Decodable, PartialEq, Eq, Hash, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Show, Copy)] pub enum Lit { Byte(ast::Name), Char(ast::Name), @@ -86,7 +86,7 @@ impl Lit { } #[allow(non_camel_case_types)] -#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Show)] pub enum Token { /* Expression-operator symbols. */ Eq, @@ -334,7 +334,7 @@ impl Token { } } -#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash)] /// For interpolation during macro expansion. pub enum Nonterminal { NtItem(P), @@ -454,7 +454,7 @@ macro_rules! 
declare_special_idents_and_keywords {( $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(init_vec.as_slice()) + interner::StrInterner::prefill(init_vec[]) } }} @@ -602,10 +602,14 @@ impl InternedString { #[inline] pub fn get<'a>(&'a self) -> &'a str { - self.string.as_slice() + self.string[] } } +impl Deref for InternedString { + fn deref(&self) -> &str { &*self.string } +} + impl BytesContainer for InternedString { fn container_as_bytes<'a>(&'a self) -> &'a [u8] { // FIXME #12938: This is a workaround for the incorrect signature @@ -620,49 +624,49 @@ impl BytesContainer for InternedString { impl fmt::Show for InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.string.as_slice()) + write!(f, "{}", self.string[]) } } #[allow(deprecated)] impl<'a> Equiv<&'a str> for InternedString { fn equiv(&self, other: & &'a str) -> bool { - (*other) == self.string.as_slice() + (*other) == self.string[] } } impl<'a> PartialEq<&'a str> for InternedString { #[inline(always)] fn eq(&self, other: & &'a str) -> bool { - PartialEq::eq(self.string.as_slice(), *other) + PartialEq::eq(self.string[], *other) } #[inline(always)] fn ne(&self, other: & &'a str) -> bool { - PartialEq::ne(self.string.as_slice(), *other) + PartialEq::ne(self.string[], *other) } } impl<'a> PartialEq for &'a str { #[inline(always)] fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(*self, other.string.as_slice()) + PartialEq::eq(*self, other.string[]) } #[inline(always)] fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(*self, other.string.as_slice()) + PartialEq::ne(*self, other.string[]) } } impl, E> Decodable for InternedString { fn decode(d: &mut D) -> Result { Ok(get_name(get_ident_interner().intern( - try!(d.read_str()).as_slice()))) + try!(d.read_str())[]))) } } impl, E> Encodable for InternedString { fn encode(&self, s: &mut S) -> Result<(), E> { - s.emit_str(self.string.as_slice()) + s.emit_str(self.string[]) } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index bfa47a46e7465..ab0e0f9585c4e 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -139,12 +139,12 @@ pub fn buf_str(toks: Vec, } s.push_str(format!("{}={}", szs[i], - tok_str(toks[i].clone())).as_slice()); + tok_str(toks[i].clone()))[]); i += 1u; i %= n; } s.push(']'); - return s.into_string(); + s } #[deriving(Copy)] @@ -601,7 +601,7 @@ impl Printer { assert_eq!(l, len); // assert!(l <= space); self.space -= len; - self.print_str(s.as_slice()) + self.print_str(s[]) } Eof => { // Eof should never get here. 
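`InternedString` above gains a `Deref` impl whose target is `str`, so the wrapper can be passed wherever a `&str` is expected. A standalone sketch of the same pattern written against the current trait shape, with an associated `Target` type; `Interned` is a hypothetical stand-in, not the real interner type:

```rust
use std::ops::Deref;
use std::rc::Rc;

// A small string wrapper that derefs to `str`, mirroring the impl added above.
#[derive(Clone)]
struct Interned {
    string: Rc<String>,
}

impl Deref for Interned {
    type Target = str;

    fn deref(&self) -> &str {
        &self.string
    }
}

fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    let name = Interned { string: Rc::new("macro_rules".to_string()) };
    // Deref coercion lets the wrapper be passed wherever `&str` is expected.
    assert_eq!(takes_str(&name), 11);
    assert!(name.starts_with("macro"));
    println!("{}", &*name);
}
```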
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index d2cc0cba3173c..21410395a90b3 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -30,6 +30,7 @@ use ptr::P; use std::{ascii, mem}; use std::io::{mod, IoResult}; +use std::iter; pub enum AnnNode<'a> { NodeIdent(&'a ast::Ident), @@ -113,7 +114,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap, out, ann, is_expanded); - try!(s.print_mod(&krate.module, krate.attrs.as_slice())); + try!(s.print_mod(&krate.module, krate.attrs[])); try!(s.print_remaining_comments()); eof(&mut s.s) } @@ -197,56 +198,56 @@ pub fn binop_to_string(op: BinOpToken) -> &'static str { pub fn token_to_string(tok: &Token) -> String { match *tok { - token::Eq => "=".into_string(), - token::Lt => "<".into_string(), - token::Le => "<=".into_string(), - token::EqEq => "==".into_string(), - token::Ne => "!=".into_string(), - token::Ge => ">=".into_string(), - token::Gt => ">".into_string(), - token::Not => "!".into_string(), - token::Tilde => "~".into_string(), - token::OrOr => "||".into_string(), - token::AndAnd => "&&".into_string(), - token::BinOp(op) => binop_to_string(op).into_string(), + token::Eq => "=".to_string(), + token::Lt => "<".to_string(), + token::Le => "<=".to_string(), + token::EqEq => "==".to_string(), + token::Ne => "!=".to_string(), + token::Ge => ">=".to_string(), + token::Gt => ">".to_string(), + token::Not => "!".to_string(), + token::Tilde => "~".to_string(), + token::OrOr => "||".to_string(), + token::AndAnd => "&&".to_string(), + token::BinOp(op) => binop_to_string(op).to_string(), token::BinOpEq(op) => format!("{}=", binop_to_string(op)), /* Structural symbols */ - token::At => "@".into_string(), - token::Dot => ".".into_string(), - token::DotDot => "..".into_string(), - token::DotDotDot => "...".into_string(), - token::Comma => ",".into_string(), - token::Semi => ";".into_string(), - token::Colon => ":".into_string(), - token::ModSep => "::".into_string(), - token::RArrow => "->".into_string(), - token::LArrow => "<-".into_string(), - token::FatArrow => "=>".into_string(), - token::OpenDelim(token::Paren) => "(".into_string(), - token::CloseDelim(token::Paren) => ")".into_string(), - token::OpenDelim(token::Bracket) => "[".into_string(), - token::CloseDelim(token::Bracket) => "]".into_string(), - token::OpenDelim(token::Brace) => "{".into_string(), - token::CloseDelim(token::Brace) => "}".into_string(), - token::Pound => "#".into_string(), - token::Dollar => "$".into_string(), - token::Question => "?".into_string(), + token::At => "@".to_string(), + token::Dot => ".".to_string(), + token::DotDot => "..".to_string(), + token::DotDotDot => "...".to_string(), + token::Comma => ",".to_string(), + token::Semi => ";".to_string(), + token::Colon => ":".to_string(), + token::ModSep => "::".to_string(), + token::RArrow => "->".to_string(), + token::LArrow => "<-".to_string(), + token::FatArrow => "=>".to_string(), + token::OpenDelim(token::Paren) => "(".to_string(), + token::CloseDelim(token::Paren) => ")".to_string(), + token::OpenDelim(token::Bracket) => "[".to_string(), + token::CloseDelim(token::Bracket) => "]".to_string(), + token::OpenDelim(token::Brace) => "{".to_string(), + token::CloseDelim(token::Brace) => "}".to_string(), + token::Pound => "#".to_string(), + token::Dollar => "$".to_string(), + token::Question => "?".to_string(), /* Literals */ token::Literal(lit, suf) => { let mut out = match lit { token::Byte(b) => format!("b'{}'", b.as_str()), token::Char(c) => format!("'{}'", c.as_str()), - 
token::Float(c) => c.as_str().into_string(), - token::Integer(c) => c.as_str().into_string(), + token::Float(c) => c.as_str().to_string(), + token::Integer(c) => c.as_str().to_string(), token::Str_(s) => format!("\"{}\"", s.as_str()), token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}", - delim="#".repeat(n), + delim=repeat("#", n), string=s.as_str()), token::Binary(v) => format!("b\"{}\"", v.as_str()), token::BinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}", - delim="#".repeat(n), + delim=repeat("#", n), string=s.as_str()), }; @@ -258,17 +259,17 @@ pub fn token_to_string(tok: &Token) -> String { } /* Name components */ - token::Ident(s, _) => token::get_ident(s).get().into_string(), + token::Ident(s, _) => token::get_ident(s).get().to_string(), token::Lifetime(s) => format!("{}", token::get_ident(s)), - token::Underscore => "_".into_string(), + token::Underscore => "_".to_string(), /* Other */ - token::DocComment(s) => s.as_str().into_string(), + token::DocComment(s) => s.as_str().to_string(), token::SubstNt(s, _) => format!("${}", s), token::MatchNt(s, t, _, _) => format!("${}:{}", s, t), - token::Eof => "".into_string(), - token::Whitespace => " ".into_string(), - token::Comment => "/* */".into_string(), + token::Eof => "".to_string(), + token::Whitespace => " ".to_string(), + token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s.as_str()), token::Interpolated(ref nt) => match *nt { @@ -276,12 +277,12 @@ pub fn token_to_string(tok: &Token) -> String { token::NtMeta(ref e) => meta_item_to_string(&**e), token::NtTy(ref e) => ty_to_string(&**e), token::NtPath(ref e) => path_to_string(&**e), - token::NtItem(..) => "an interpolated item".into_string(), - token::NtBlock(..) => "an interpolated block".into_string(), - token::NtStmt(..) => "an interpolated statement".into_string(), - token::NtPat(..) => "an interpolated pattern".into_string(), - token::NtIdent(..) => "an interpolated identifier".into_string(), - token::NtTT(..) => "an interpolated tt".into_string(), + token::NtItem(..) => "an interpolated item".to_string(), + token::NtBlock(..) => "an interpolated block".to_string(), + token::NtStmt(..) => "an interpolated statement".to_string(), + token::NtPat(..) => "an interpolated pattern".to_string(), + token::NtIdent(..) => "an interpolated identifier".to_string(), + token::NtTT(..) 
=> "an interpolated tt".to_string(), } } } @@ -577,7 +578,7 @@ impl<'a> State<'a> { pub fn synth_comment(&mut self, text: String) -> IoResult<()> { try!(word(&mut self.s, "/*")); try!(space(&mut self.s)); - try!(word(&mut self.s, text.as_slice())); + try!(word(&mut self.s, text[])); try!(space(&mut self.s)); word(&mut self.s, "*/") } @@ -682,7 +683,7 @@ impl<'a> State<'a> { } ast::TyTup(ref elts) => { try!(self.popen()); - try!(self.commasep(Inconsistent, elts.as_slice(), + try!(self.commasep(Inconsistent, elts[], |s, ty| s.print_type(&**ty))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -737,10 +738,10 @@ impl<'a> State<'a> { } ast::TyObjectSum(ref ty, ref bounds) => { try!(self.print_type(&**ty)); - try!(self.print_bounds("+", bounds.as_slice())); + try!(self.print_bounds("+", bounds[])); } ast::TyPolyTraitRef(ref bounds) => { - try!(self.print_bounds("", bounds.as_slice())); + try!(self.print_bounds("", bounds[])); } ast::TyQPath(ref qpath) => { try!(word(&mut self.s, "<")); @@ -755,7 +756,7 @@ impl<'a> State<'a> { ast::TyFixedLengthVec(ref ty, ref v) => { try!(word(&mut self.s, "[")); try!(self.print_type(&**ty)); - try!(word(&mut self.s, ", ..")); + try!(word(&mut self.s, "; ")); try!(self.print_expr(&**v)); try!(word(&mut self.s, "]")); } @@ -775,7 +776,7 @@ impl<'a> State<'a> { item: &ast::ForeignItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs.as_slice())); + try!(self.print_outer_attributes(item.attrs[])); match item.node { ast::ForeignItemFn(ref decl, ref generics) => { try!(self.print_fn(&**decl, None, abi::Rust, item.ident, generics, @@ -786,7 +787,7 @@ impl<'a> State<'a> { } ast::ForeignItemStatic(ref t, m) => { try!(self.head(visibility_qualified(item.vis, - "static").as_slice())); + "static")[])); if m { try!(self.word_space("mut")); } @@ -822,12 +823,12 @@ impl<'a> State<'a> { pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs.as_slice())); + try!(self.print_outer_attributes(item.attrs[])); try!(self.ann.pre(self, NodeItem(item))); match item.node { ast::ItemStatic(ref ty, m, ref expr) => { try!(self.head(visibility_qualified(item.vis, - "static").as_slice())); + "static")[])); if m == ast::MutMutable { try!(self.word_space("mut")); } @@ -844,7 +845,7 @@ impl<'a> State<'a> { } ast::ItemConst(ref ty, ref expr) => { try!(self.head(visibility_qualified(item.vis, - "const").as_slice())); + "const")[])); try!(self.print_ident(item.ident)); try!(self.word_space(":")); try!(self.print_type(&**ty)); @@ -867,29 +868,29 @@ impl<'a> State<'a> { item.vis )); try!(word(&mut self.s, " ")); - try!(self.print_block_with_attrs(&**body, item.attrs.as_slice())); + try!(self.print_block_with_attrs(&**body, item.attrs[])); } ast::ItemMod(ref _mod) => { try!(self.head(visibility_qualified(item.vis, - "mod").as_slice())); + "mod")[])); try!(self.print_ident(item.ident)); try!(self.nbsp()); try!(self.bopen()); - try!(self.print_mod(_mod, item.attrs.as_slice())); + try!(self.print_mod(_mod, item.attrs[])); try!(self.bclose(item.span)); } ast::ItemForeignMod(ref nmod) => { try!(self.head("extern")); - try!(self.word_nbsp(nmod.abi.to_string().as_slice())); + try!(self.word_nbsp(nmod.abi.to_string()[])); try!(self.bopen()); - try!(self.print_foreign_mod(nmod, item.attrs.as_slice())); + try!(self.print_foreign_mod(nmod, item.attrs[])); 
try!(self.bclose(item.span)); } ast::ItemTy(ref ty, ref params) => { try!(self.ibox(indent_unit)); try!(self.ibox(0u)); try!(self.word_nbsp(visibility_qualified(item.vis, - "type").as_slice())); + "type")[])); try!(self.print_ident(item.ident)); try!(self.print_generics(params)); try!(self.end()); // end the inner ibox @@ -911,7 +912,7 @@ impl<'a> State<'a> { )); } ast::ItemStruct(ref struct_def, ref generics) => { - try!(self.head(visibility_qualified(item.vis,"struct").as_slice())); + try!(self.head(visibility_qualified(item.vis,"struct")[])); try!(self.print_struct(&**struct_def, generics, item.ident, item.span)); } @@ -944,7 +945,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.bopen()); - try!(self.print_inner_attributes(item.attrs.as_slice())); + try!(self.print_inner_attributes(item.attrs[])); for impl_item in impl_items.iter() { match *impl_item { ast::MethodImplItem(ref meth) => { @@ -970,7 +971,7 @@ impl<'a> State<'a> { try!(self.print_trait_ref(tref)); try!(word(&mut self.s, "?")); } - try!(self.print_bounds(":", bounds.as_slice())); + try!(self.print_bounds(":", bounds[])); try!(self.print_where_clause(generics)); try!(word(&mut self.s, " ")); try!(self.bopen()); @@ -988,7 +989,7 @@ impl<'a> State<'a> { try!(self.print_ident(item.ident)); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(tts.as_slice())); + try!(self.print_tts(tts[])); try!(self.pclose()); try!(word(&mut self.s, ";")); try!(self.end()); @@ -1022,12 +1023,12 @@ impl<'a> State<'a> { generics: &ast::Generics, ident: ast::Ident, span: codemap::Span, visibility: ast::Visibility) -> IoResult<()> { - try!(self.head(visibility_qualified(visibility, "enum").as_slice())); + try!(self.head(visibility_qualified(visibility, "enum")[])); try!(self.print_ident(ident)); try!(self.print_generics(generics)); try!(self.print_where_clause(generics)); try!(space(&mut self.s)); - self.print_variants(enum_definition.variants.as_slice(), span) + self.print_variants(enum_definition.variants[], span) } pub fn print_variants(&mut self, @@ -1037,7 +1038,7 @@ impl<'a> State<'a> { for v in variants.iter() { try!(self.space_if_not_bol()); try!(self.maybe_print_comment(v.span.lo)); - try!(self.print_outer_attributes(v.node.attrs.as_slice())); + try!(self.print_outer_attributes(v.node.attrs[])); try!(self.ibox(indent_unit)); try!(self.print_variant(&**v)); try!(word(&mut self.s, ",")); @@ -1066,7 +1067,7 @@ impl<'a> State<'a> { if !struct_def.fields.is_empty() { try!(self.popen()); try!(self.commasep( - Inconsistent, struct_def.fields.as_slice(), + Inconsistent, struct_def.fields[], |s, field| { match field.node.kind { ast::NamedField(..) => panic!("unexpected named field"), @@ -1094,7 +1095,7 @@ impl<'a> State<'a> { ast::NamedField(ident, visibility) => { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(field.span.lo)); - try!(self.print_outer_attributes(field.node.attrs.as_slice())); + try!(self.print_outer_attributes(field.node.attrs[])); try!(self.print_visibility(visibility)); try!(self.print_ident(ident)); try!(self.word_nbsp(":")); @@ -1118,7 +1119,7 @@ impl<'a> State<'a> { pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { ast::TtToken(_, ref tk) => { - try!(word(&mut self.s, token_to_string(tk).as_slice())); + try!(word(&mut self.s, token_to_string(tk)[])); match *tk { parse::token::DocComment(..) 
=> { hardbreak(&mut self.s) @@ -1127,11 +1128,11 @@ impl<'a> State<'a> { } } ast::TtDelimited(_, ref delimed) => { - try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice())); + try!(word(&mut self.s, token_to_string(&delimed.open_token())[])); try!(space(&mut self.s)); - try!(self.print_tts(delimed.tts.as_slice())); + try!(self.print_tts(delimed.tts[])); try!(space(&mut self.s)); - word(&mut self.s, token_to_string(&delimed.close_token()).as_slice()) + word(&mut self.s, token_to_string(&delimed.close_token())[]) }, ast::TtSequence(_, ref seq) => { try!(word(&mut self.s, "$(")); @@ -1141,7 +1142,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, ")")); match seq.separator { Some(ref tk) => { - try!(word(&mut self.s, token_to_string(tk).as_slice())); + try!(word(&mut self.s, token_to_string(tk)[])); } None => {}, } @@ -1172,7 +1173,7 @@ impl<'a> State<'a> { if !args.is_empty() { try!(self.popen()); try!(self.commasep(Consistent, - args.as_slice(), + args[], |s, arg| s.print_type(&*arg.ty))); try!(self.pclose()); } @@ -1196,7 +1197,7 @@ impl<'a> State<'a> { pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(m.span.lo)); - try!(self.print_outer_attributes(m.attrs.as_slice())); + try!(self.print_outer_attributes(m.attrs[])); try!(self.print_ty_fn(None, None, m.unsafety, @@ -1228,7 +1229,7 @@ impl<'a> State<'a> { pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(meth.span.lo)); - try!(self.print_outer_attributes(meth.attrs.as_slice())); + try!(self.print_outer_attributes(meth.attrs[])); match meth.node { ast::MethDecl(ident, ref generics, @@ -1246,7 +1247,7 @@ impl<'a> State<'a> { Some(&explicit_self.node), vis)); try!(word(&mut self.s, " ")); - self.print_block_with_attrs(&**body, meth.attrs.as_slice()) + self.print_block_with_attrs(&**body, meth.attrs[]) }, ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _), ..}) => { @@ -1255,7 +1256,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "! 
")); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(tts.as_slice())); + try!(self.print_tts(tts[])); try!(self.pclose()); try!(word(&mut self.s, ";")); self.end() @@ -1522,7 +1523,7 @@ impl<'a> State<'a> { ast::ExprVec(ref exprs) => { try!(self.ibox(indent_unit)); try!(word(&mut self.s, "[")); - try!(self.commasep_exprs(Inconsistent, exprs.as_slice())); + try!(self.commasep_exprs(Inconsistent, exprs[])); try!(word(&mut self.s, "]")); try!(self.end()); } @@ -1531,8 +1532,7 @@ impl<'a> State<'a> { try!(self.ibox(indent_unit)); try!(word(&mut self.s, "[")); try!(self.print_expr(&**element)); - try!(word(&mut self.s, ",")); - try!(word(&mut self.s, "..")); + try!(self.word_space(";")); try!(self.print_expr(&**count)); try!(word(&mut self.s, "]")); try!(self.end()); @@ -1543,7 +1543,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "{")); try!(self.commasep_cmnt( Consistent, - fields.as_slice(), + fields[], |s, field| { try!(s.ibox(indent_unit)); try!(s.print_ident(field.ident.node)); @@ -1569,7 +1569,7 @@ impl<'a> State<'a> { } ast::ExprTup(ref exprs) => { try!(self.popen()); - try!(self.commasep_exprs(Inconsistent, exprs.as_slice())); + try!(self.commasep_exprs(Inconsistent, exprs[])); if exprs.len() == 1 { try!(word(&mut self.s, ",")); } @@ -1577,7 +1577,7 @@ impl<'a> State<'a> { } ast::ExprCall(ref func, ref args) => { try!(self.print_expr_maybe_paren(&**func)); - try!(self.print_call_post(args.as_slice())); + try!(self.print_call_post(args[])); } ast::ExprMethodCall(ident, ref tys, ref args) => { let base_args = args.slice_from(1); @@ -1586,7 +1586,7 @@ impl<'a> State<'a> { try!(self.print_ident(ident.node)); if tys.len() > 0u { try!(word(&mut self.s, "::<")); - try!(self.commasep(Inconsistent, tys.as_slice(), + try!(self.commasep(Inconsistent, tys[], |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ">")); } @@ -1787,20 +1787,16 @@ impl<'a> State<'a> { } } ast::ExprInlineAsm(ref a) => { - if a.volatile { - try!(word(&mut self.s, "__volatile__ asm!")); - } else { - try!(word(&mut self.s, "asm!")); - } + try!(word(&mut self.s, "asm!")); try!(self.popen()); try!(self.print_string(a.asm.get(), a.asm_str_style)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.outputs.as_slice(), + try!(self.commasep(Inconsistent, a.outputs[], |s, &(ref co, ref o, is_rw)| { match co.get().slice_shift_char() { Some(('=', operand)) if is_rw => { - try!(s.print_string(format!("+{}", operand).as_slice(), + try!(s.print_string(format!("+{}", operand)[], ast::CookedStr)) } _ => try!(s.print_string(co.get(), ast::CookedStr)) @@ -1813,7 +1809,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.inputs.as_slice(), + try!(self.commasep(Inconsistent, a.inputs[], |s, &(ref co, ref o)| { try!(s.print_string(co.get(), ast::CookedStr)); try!(s.popen()); @@ -1824,11 +1820,33 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.clobbers.as_slice(), + try!(self.commasep(Inconsistent, a.clobbers[], |s, co| { try!(s.print_string(co.get(), ast::CookedStr)); Ok(()) })); + + let mut options = vec!(); + if a.volatile { + options.push("volatile"); + } + if a.alignstack { + options.push("alignstack"); + } + if a.dialect == ast::AsmDialect::AsmIntel { + options.push("intel"); + } + + if options.len() > 0 { + try!(space(&mut self.s)); + try!(self.word_space(":")); + try!(self.commasep(Inconsistent, &*options, + |s, &co| { + try!(s.print_string(co, ast::CookedStr)); + 
Ok(()) + })); + } + try!(self.pclose()); } ast::ExprMac(ref m) => try!(self.print_mac(m, token::Paren)), @@ -1878,7 +1896,7 @@ impl<'a> State<'a> { pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> { if self.encode_idents_with_hygiene { let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, encoded.as_slice())) + try!(word(&mut self.s, encoded[])) } else { try!(word(&mut self.s, token::get_ident(ident).get())) } @@ -1886,7 +1904,7 @@ impl<'a> State<'a> { } pub fn print_uint(&mut self, i: uint) -> IoResult<()> { - word(&mut self.s, i.to_string().as_slice()) + word(&mut self.s, i.to_string()[]) } pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> { @@ -1960,7 +1978,7 @@ impl<'a> State<'a> { } try!(self.commasep( Inconsistent, - data.types.as_slice(), + data.types[], |s, ty| s.print_type(&**ty))); comma = true; } @@ -1983,7 +2001,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "(")); try!(self.commasep( Inconsistent, - data.inputs.as_slice(), + data.inputs[], |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ")")); @@ -2036,7 +2054,7 @@ impl<'a> State<'a> { Some(ref args) => { if !args.is_empty() { try!(self.popen()); - try!(self.commasep(Inconsistent, args.as_slice(), + try!(self.commasep(Inconsistent, args[], |s, p| s.print_pat(&**p))); try!(self.pclose()); } @@ -2048,7 +2066,7 @@ impl<'a> State<'a> { try!(self.nbsp()); try!(self.word_space("{")); try!(self.commasep_cmnt( - Consistent, fields.as_slice(), + Consistent, fields[], |s, f| { try!(s.cbox(indent_unit)); if !f.node.is_shorthand { @@ -2069,7 +2087,7 @@ impl<'a> State<'a> { ast::PatTup(ref elts) => { try!(self.popen()); try!(self.commasep(Inconsistent, - elts.as_slice(), + elts[], |s, p| s.print_pat(&**p))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -2094,7 +2112,7 @@ impl<'a> State<'a> { ast::PatVec(ref before, ref slice, ref after) => { try!(word(&mut self.s, "[")); try!(self.commasep(Inconsistent, - before.as_slice(), + before[], |s, p| s.print_pat(&**p))); for p in slice.iter() { if !before.is_empty() { try!(self.word_space(",")); } @@ -2108,7 +2126,7 @@ impl<'a> State<'a> { if !after.is_empty() { try!(self.word_space(",")); } } try!(self.commasep(Inconsistent, - after.as_slice(), + after[], |s, p| s.print_pat(&**p))); try!(word(&mut self.s, "]")); } @@ -2125,7 +2143,7 @@ impl<'a> State<'a> { } try!(self.cbox(indent_unit)); try!(self.ibox(0u)); - try!(self.print_outer_attributes(arm.attrs.as_slice())); + try!(self.print_outer_attributes(arm.attrs[])); let mut first = true; for p in arm.pats.iter() { if first { @@ -2225,7 +2243,7 @@ impl<'a> State<'a> { // HACK(eddyb) ignore the separately printed self argument. 
let args = if first { - decl.inputs.as_slice() + decl.inputs[] } else { decl.inputs.slice_from(1) }; @@ -2387,7 +2405,7 @@ impl<'a> State<'a> { ints.push(i); } - try!(self.commasep(Inconsistent, ints.as_slice(), |s, &idx| { + try!(self.commasep(Inconsistent, ints[], |s, &idx| { if idx < generics.lifetimes.len() { let lifetime = &generics.lifetimes[idx]; s.print_lifetime_def(lifetime) @@ -2408,7 +2426,7 @@ impl<'a> State<'a> { try!(self.word_space("?")); } try!(self.print_ident(param.ident)); - try!(self.print_bounds(":", param.bounds.as_slice())); + try!(self.print_bounds(":", param.bounds[])); match param.default { Some(ref default) => { try!(space(&mut self.s)); @@ -2437,12 +2455,26 @@ impl<'a> State<'a> { } match predicate { - &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ident, + &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounded_ty, ref bounds, ..}) => { - try!(self.print_ident(ident)); + try!(self.print_type(&**bounded_ty)); try!(self.print_bounds(":", bounds.as_slice())); } + &ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime, + ref bounds, + ..}) => { + try!(self.print_lifetime(lifetime)); + try!(word(&mut self.s, ":")); + + for (i, bound) in bounds.iter().enumerate() { + try!(self.print_lifetime(bound)); + + if i != 0 { + try!(word(&mut self.s, ":")); + } + } + } &ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{ref path, ref ty, ..}) => { try!(self.print_path(path, false)); try!(space(&mut self.s)); @@ -2470,7 +2502,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, name.get())); try!(self.popen()); try!(self.commasep(Consistent, - items.as_slice(), + items[], |s, i| s.print_meta_item(&**i))); try!(self.pclose()); } @@ -2506,7 +2538,7 @@ impl<'a> State<'a> { try!(self.print_path(path, false)); try!(word(&mut self.s, "::{")); } - try!(self.commasep(Inconsistent, idents.as_slice(), |s, w| { + try!(self.commasep(Inconsistent, idents[], |s, w| { match w.node { ast::PathListIdent { name, .. 
} => { s.print_ident(name) @@ -2524,7 +2556,7 @@ impl<'a> State<'a> { pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs.as_slice())); + try!(self.print_outer_attributes(item.attrs[])); try!(self.print_visibility(item.vis)); match item.node { ast::ViewItemExternCrate(id, ref optional_path, _) => { @@ -2666,7 +2698,7 @@ impl<'a> State<'a> { try!(self.pclose()); } - try!(self.print_bounds(":", bounds.as_slice())); + try!(self.print_bounds(":", bounds[])); try!(self.print_fn_output(decl)); @@ -2725,7 +2757,7 @@ impl<'a> State<'a> { try!(self.maybe_print_comment(lit.span.lo)); match self.next_lit(lit.span.lo) { Some(ref ltrl) => { - return word(&mut self.s, (*ltrl).lit.as_slice()); + return word(&mut self.s, (*ltrl).lit[]); } _ => () } @@ -2735,7 +2767,7 @@ impl<'a> State<'a> { let mut res = String::from_str("b'"); ascii::escape_default(byte, |c| res.push(c as char)); res.push('\''); - word(&mut self.s, res.as_slice()) + word(&mut self.s, res[]) } ast::LitChar(ch) => { let mut res = String::from_str("'"); @@ -2743,27 +2775,27 @@ impl<'a> State<'a> { res.push(c); } res.push('\''); - word(&mut self.s, res.as_slice()) + word(&mut self.s, res[]) } ast::LitInt(i, t) => { match t { ast::SignedIntLit(st, ast::Plus) => { word(&mut self.s, - ast_util::int_ty_to_string(st, Some(i as i64)).as_slice()) + ast_util::int_ty_to_string(st, Some(i as i64))[]) } ast::SignedIntLit(st, ast::Minus) => { let istr = ast_util::int_ty_to_string(st, Some(-(i as i64))); word(&mut self.s, - format!("-{}", istr).as_slice()) + format!("-{}", istr)[]) } ast::UnsignedIntLit(ut) => { - word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_slice()) + word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i))[]) } ast::UnsuffixedIntLit(ast::Plus) => { - word(&mut self.s, format!("{}", i).as_slice()) + word(&mut self.s, format!("{}", i)[]) } ast::UnsuffixedIntLit(ast::Minus) => { - word(&mut self.s, format!("-{}", i).as_slice()) + word(&mut self.s, format!("-{}", i)[]) } } } @@ -2772,7 +2804,7 @@ impl<'a> State<'a> { format!( "{}{}", f.get(), - ast_util::float_ty_to_string(t).as_slice()).as_slice()) + ast_util::float_ty_to_string(t)[])[]) } ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()), ast::LitBool(val) => { @@ -2784,7 +2816,7 @@ impl<'a> State<'a> { ascii::escape_default(ch as u8, |ch| escaped.push(ch as char)); } - word(&mut self.s, format!("b\"{}\"", escaped).as_slice()) + word(&mut self.s, format!("b\"{}\"", escaped)[]) } } } @@ -2825,7 +2857,7 @@ impl<'a> State<'a> { comments::Mixed => { assert_eq!(cmnt.lines.len(), 1u); try!(zerobreak(&mut self.s)); - try!(word(&mut self.s, cmnt.lines[0].as_slice())); + try!(word(&mut self.s, cmnt.lines[0][])); zerobreak(&mut self.s) } comments::Isolated => { @@ -2834,7 +2866,7 @@ impl<'a> State<'a> { // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { - try!(word(&mut self.s, line.as_slice())); + try!(word(&mut self.s, line[])); } try!(hardbreak(&mut self.s)); } @@ -2843,13 +2875,13 @@ impl<'a> State<'a> { comments::Trailing => { try!(word(&mut self.s, " ")); if cmnt.lines.len() == 1u { - try!(word(&mut self.s, cmnt.lines[0].as_slice())); + try!(word(&mut self.s, cmnt.lines[0][])); hardbreak(&mut self.s) } else { try!(self.ibox(0u)); for line in cmnt.lines.iter() { if !line.is_empty() { - try!(word(&mut self.s, line.as_slice())); + try!(word(&mut self.s, line[])); } 
try!(hardbreak(&mut self.s)); } @@ -2878,11 +2910,11 @@ impl<'a> State<'a> { } ast::RawStr(n) => { (format!("r{delim}\"{string}\"{delim}", - delim="#".repeat(n), + delim=repeat("#", n), string=st)) } }; - word(&mut self.s, st.as_slice()) + word(&mut self.s, st[]) } pub fn next_comment(&mut self) -> Option { @@ -2913,7 +2945,7 @@ impl<'a> State<'a> { Some(abi::Rust) => Ok(()), Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string().as_slice()) + self.word_nbsp(abi.to_string()[]) } None => Ok(()) } @@ -2924,7 +2956,7 @@ impl<'a> State<'a> { match opt_abi { Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string().as_slice()) + self.word_nbsp(abi.to_string()[]) } None => Ok(()) } @@ -2940,7 +2972,7 @@ impl<'a> State<'a> { if abi != abi::Rust { try!(self.word_nbsp("extern")); - try!(self.word_nbsp(abi.to_string().as_slice())); + try!(self.word_nbsp(abi.to_string()[])); } word(&mut self.s, "fn") @@ -2954,6 +2986,8 @@ impl<'a> State<'a> { } } +fn repeat(s: &str, n: uint) -> String { iter::repeat(s).take(n).collect() } + #[cfg(test)] mod test { use super::*; diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index e98be046586ea..e1c8ff5011b26 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -40,7 +40,7 @@ pub fn maybe_inject_prelude(krate: ast::Crate) -> ast::Crate { } fn use_std(krate: &ast::Crate) -> bool { - !attr::contains_name(krate.attrs.as_slice(), "no_std") + !attr::contains_name(krate.attrs[], "no_std") } fn no_prelude(attrs: &[ast::Attribute]) -> bool { @@ -56,7 +56,7 @@ impl<'a> fold::Folder for StandardLibraryInjector<'a> { // The name to use in `extern crate "name" as std;` let actual_crate_name = match self.alt_std_name { - Some(ref s) => token::intern_and_get_ident(s.as_slice()), + Some(ref s) => token::intern_and_get_ident(s[]), None => token::intern_and_get_ident("std"), }; @@ -118,7 +118,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> { attr::mark_used(&no_std_attr); krate.attrs.push(no_std_attr); - if !no_prelude(krate.attrs.as_slice()) { + if !no_prelude(krate.attrs[]) { // only add `use std::prelude::*;` if there wasn't a // `#![no_implicit_prelude]` at the crate level. // fold_mod() will insert glob path. @@ -138,7 +138,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> { } fn fold_item(&mut self, item: P) -> SmallVector> { - if !no_prelude(item.attrs.as_slice()) { + if !no_prelude(item.attrs[]) { // only recur if there wasn't `#![no_implicit_prelude]` // on this item, i.e. this means that the prelude is not // implicitly imported though the whole subtree diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 155cabb153cfa..bc7dda8c44acc 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -73,14 +73,14 @@ pub fn modify_for_testing(sess: &ParseSess, // We generate the test harness when building in the 'test' // configuration, either with the '--test' or '--cfg test' // command line options. - let should_test = attr::contains_name(krate.config.as_slice(), "test"); + let should_test = attr::contains_name(krate.config[], "test"); // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use some_name = __test::main;`. This needs to be // unconditional, so that the attribute is still marked as used in // non-test builds. 
let reexport_test_harness_main = - attr::first_attr_value_str_by_name(krate.attrs.as_slice(), + attr::first_attr_value_str_by_name(krate.attrs[], "reexport_test_harness_main"); if should_test { @@ -119,7 +119,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { self.cx.path.push(ident); } debug!("current path: {}", - ast_util::path_name_i(self.cx.path.as_slice())); + ast_util::path_name_i(self.cx.path[])); if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) { match i.node { @@ -277,8 +277,8 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate { // When not compiling with --test we should not compile the // #[test] functions config::strip_items(krate, |attrs| { - !attr::contains_name(attrs.as_slice(), "test") && - !attr::contains_name(attrs.as_slice(), "bench") + !attr::contains_name(attrs[], "test") && + !attr::contains_name(attrs[], "bench") }) } @@ -291,7 +291,7 @@ enum HasTestSignature { fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool { - let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test"); + let has_test_attr = attr::contains_name(i.attrs[], "test"); fn has_test_signature(i: &ast::Item) -> HasTestSignature { match &i.node { @@ -329,7 +329,7 @@ fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool { } fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { - let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench"); + let has_bench_attr = attr::contains_name(i.attrs[], "bench"); fn has_test_signature(i: &ast::Item) -> bool { match i.node { @@ -384,7 +384,7 @@ We're going to be building a module that looks more or less like: mod __test { extern crate test (name = "test", vers = "..."); fn main() { - test::test_main_static(::os::args().as_slice(), tests) + test::test_main_static(::os::args()[], tests) } static tests : &'static [test::TestDescAndFn] = &[ @@ -510,8 +510,8 @@ fn mk_tests(cx: &TestCtxt) -> P { } fn is_test_crate(krate: &ast::Crate) -> bool { - match attr::find_crate_name(krate.attrs.as_slice()) { - Some(ref s) if "test" == s.get().as_slice() => true, + match attr::find_crate_name(krate.attrs[]) { + Some(ref s) if "test" == s.get()[] => true, _ => false } } @@ -551,11 +551,11 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { // creates $name: $expr let field = |name, expr| ecx.field_imm(span, ecx.ident_of(name), expr); - debug!("encoding {}", ast_util::path_name_i(path.as_slice())); + debug!("encoding {}", ast_util::path_name_i(path[])); // path to the #[test] function: "foo::bar::baz" - let path_string = ast_util::path_name_i(path.as_slice()); - let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string.as_slice())); + let path_string = ast_util::path_name_i(path[]); + let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string[])); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 590a04ce2210f..97eb43165833a 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -95,41 +95,37 @@ pub struct RcStr { string: Rc, } +impl RcStr { + pub fn new(string: &str) -> RcStr { + RcStr { + string: Rc::new(string.to_string()), + } + } +} + impl Eq for RcStr {} impl Ord for RcStr { fn cmp(&self, other: &RcStr) -> Ordering { - self.as_slice().cmp(other.as_slice()) - } -} - -impl Str for RcStr { - #[inline] - fn as_slice<'a>(&'a self) -> &'a str { - let s: &'a str = self.string.as_slice(); - s + self[].cmp(other[]) } } impl fmt::Show for RcStr { fn fmt(&self, f: 
&mut fmt::Formatter) -> fmt::Result { use std::fmt::Show; - self.as_slice().fmt(f) + self[].fmt(f) } } impl BorrowFrom for str { fn borrow_from(owned: &RcStr) -> &str { - owned.string.as_slice() + owned.string[] } } -impl RcStr { - pub fn new(string: &str) -> RcStr { - RcStr { - string: Rc::new(string.into_string()), - } - } +impl Deref for RcStr { + fn deref(&self) -> &str { self.string[] } } /// A StrInterner differs from Interner in that it accepts diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index 8d050e34abf4c..946181770c8e9 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. use self::SmallVectorRepr::*; -use self::MoveItemsRepr::*; +use self::IntoIterRepr::*; use std::mem; use std::slice; @@ -111,17 +111,17 @@ impl SmallVector { /// Deprecated: use `into_iter`. #[deprecated = "use into_iter"] - pub fn move_iter(self) -> MoveItems { + pub fn move_iter(self) -> IntoIter { self.into_iter() } - pub fn into_iter(self) -> MoveItems { + pub fn into_iter(self) -> IntoIter { let repr = match self.repr { Zero => ZeroIterator, One(v) => OneIterator(v), Many(vs) => ManyIterator(vs.into_iter()) }; - MoveItems { repr: repr } + IntoIter { repr: repr } } pub fn len(&self) -> uint { @@ -135,17 +135,17 @@ impl SmallVector { pub fn is_empty(&self) -> bool { self.len() == 0 } } -pub struct MoveItems { - repr: MoveItemsRepr, +pub struct IntoIter { + repr: IntoIterRepr, } -enum MoveItemsRepr { +enum IntoIterRepr { ZeroIterator, OneIterator(T), - ManyIterator(vec::MoveItems), + ManyIterator(vec::IntoIter), } -impl Iterator for MoveItems { +impl Iterator for IntoIter { fn next(&mut self) -> Option { match self.repr { ZeroIterator => None, diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index b89e9a59349ce..9938feb171eb0 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -583,13 +583,21 @@ pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics walk_lifetime_decls_helper(visitor, &generics.lifetimes); for predicate in generics.where_clause.predicates.iter() { match predicate { - &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{span, - ident, + &ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounded_ty, ref bounds, ..}) => { - visitor.visit_ident(span, ident); + visitor.visit_ty(&**bounded_ty); walk_ty_param_bounds_helper(visitor, bounds); } + &ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime, + ref bounds, + ..}) => { + visitor.visit_lifetime_ref(lifetime); + + for bound in bounds.iter() { + visitor.visit_lifetime_ref(bound); + } + } &ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{id, ref path, ref ty, diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs index 65f8415835a36..d944d0362fbe5 100644 --- a/src/libterm/terminfo/mod.rs +++ b/src/libterm/terminfo/mod.rs @@ -180,7 +180,7 @@ impl TerminfoTerminal { } }; - let entry = open(term.as_slice()); + let entry = open(term[]); if entry.is_err() { if os::getenv("MSYSCON").map_or(false, |s| { "mintty.exe" == s diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index 33bfd69f71bb8..395fac52d8dac 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -61,13 +61,13 @@ pub fn get_dbpath_for_term(term: &str) -> Option> { for p in dirs_to_search.iter() { if p.exists() { let f = 
first_char.to_string(); - let newp = p.join_many(&[f.as_slice(), term]); + let newp = p.join_many(&[f[], term]); if newp.exists() { return Some(box newp); } // on some installations the dir is named after the hex of the char (e.g. OS X) let f = format!("{:x}", first_char as uint); - let newp = p.join_many(&[f.as_slice(), term]); + let newp = p.join_many(&[f[], term]); if newp.exists() { return Some(box newp); } diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index 5b04a1fed896c..4b73fe04c859d 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -37,6 +37,7 @@ extern crate getopts; extern crate regex; extern crate serialize; +extern crate "serialize" as rustc_serialize; extern crate term; pub use self::TestFn::*; @@ -65,6 +66,7 @@ use std::io::fs::PathExtensions; use std::io::stdio::StdWriter; use std::io::{File, ChanReader, ChanWriter}; use std::io; +use std::iter::repeat; use std::num::{Float, FloatMath, Int}; use std::os; use std::str::FromStr; @@ -121,7 +123,7 @@ impl TestDesc { fn padded_name(&self, column_count: uint, align: NamePadding) -> String { let mut name = String::from_str(self.name.as_slice()); let fill = column_count.saturating_sub(name.len()); - let mut pad = " ".repeat(fill); + let mut pad = repeat(" ").take(fill).collect::(); match align { PadNone => name, PadOnLeft => { @@ -213,7 +215,7 @@ pub struct TestDescAndFn { pub testfn: TestFn, } -#[deriving(Clone, Copy, Encodable, Decodable, PartialEq, Show)] +#[deriving(Clone, RustcEncodable, RustcDecodable, PartialEq, Show, Copy)] pub struct Metric { value: f64, noise: f64 @@ -426,7 +428,7 @@ pub fn parse_opts(args: &[String]) -> Option { let ratchet_noise_percent = matches.opt_str("ratchet-noise-percent"); let ratchet_noise_percent = - ratchet_noise_percent.map(|s| from_str::(s.as_slice()).unwrap()); + ratchet_noise_percent.map(|s| s.as_slice().parse::().unwrap()); let save_metrics = matches.opt_str("save-metrics"); let save_metrics = save_metrics.map(|s| Path::new(s)); @@ -489,7 +491,8 @@ pub fn opt_shard(maybestr: Option) -> Option<(uint,uint)> { None => None, Some(s) => { let mut it = s.split('.'); - match (it.next().and_then(from_str::), it.next().and_then(from_str::), + match (it.next().and_then(|s| s.parse::()), + it.next().and_then(|s| s.parse::()), it.next()) { (Some(a), Some(b), None) => { if a <= 0 || a > b { diff --git a/src/libtest/stats.rs b/src/libtest/stats.rs index 7441b39f35b23..41146cded704c 100644 --- a/src/libtest/stats.rs +++ b/src/libtest/stats.rs @@ -11,7 +11,7 @@ #![allow(missing_docs)] use std::collections::hash_map; -use std::collections::hash_map::{Occupied, Vacant}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::fmt::Show; use std::hash::Hash; use std::io; diff --git a/src/libtime/lib.rs b/src/libtime/lib.rs index 1b7f5cdc4af51..e58a0229d6962 100644 --- a/src/libtime/lib.rs +++ b/src/libtime/lib.rs @@ -24,7 +24,9 @@ #[cfg(test)] #[phase(plugin, link)] extern crate log; +#[cfg(stage0)] extern crate serialize; +extern crate "serialize" as rustc_serialize; extern crate libc; pub use self::ParseError::*; @@ -76,7 +78,8 @@ mod imp { } /// A record specifying a time value in seconds and nanoseconds. 
-#[deriving(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encodable, Decodable, Show)] +#[deriving(Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, + RustcDecodable, Show, Copy)] pub struct Timespec { pub sec: i64, pub nsec: i32, @@ -97,29 +100,6 @@ impl Timespec { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Add for Timespec { - fn add(&self, other: &Duration) -> Timespec { - let d_sec = other.num_seconds(); - // It is safe to unwrap the nanoseconds, because there cannot be - // more than one second left, which fits in i64 and in i32. - let d_nsec = (*other - Duration::seconds(d_sec)) - .num_nanoseconds().unwrap() as i32; - let mut sec = self.sec + d_sec; - let mut nsec = self.nsec + d_nsec; - if nsec >= NSEC_PER_SEC { - nsec -= NSEC_PER_SEC; - sec += 1; - } else if nsec < 0 { - nsec += NSEC_PER_SEC; - sec -= 1; - } - Timespec::new(sec, nsec) - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Add for Timespec { fn add(self, other: Duration) -> Timespec { let d_sec = other.num_seconds(); @@ -140,17 +120,6 @@ impl Add for Timespec { } } -// NOTE(stage0): Remove impl after a snapshot -#[cfg(stage0)] -impl Sub for Timespec { - fn sub(&self, other: &Timespec) -> Duration { - let sec = self.sec - other.sec; - let nsec = self.nsec - other.nsec; - Duration::seconds(sec) + Duration::nanoseconds(nsec as i64) - } -} - -#[cfg(not(stage0))] // NOTE(stage0): Remove cfg after a snapshot impl Sub for Timespec { fn sub(self, other: Timespec) -> Duration { let sec = self.sec - other.sec; diff --git a/src/libunicode/lib.rs b/src/libunicode/lib.rs index 1f75daa7bdecd..d33362ec23295 100644 --- a/src/libunicode/lib.rs +++ b/src/libunicode/lib.rs @@ -28,8 +28,7 @@ html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] #![no_std] -#![feature(globs)] -#![feature(unboxed_closures)] +#![feature(globs, macro_rules, slicing_syntax, unboxed_closures)] extern crate core; @@ -74,11 +73,14 @@ pub mod char { } pub mod str { - pub use u_str::{UnicodeStrPrelude, Words, Graphemes, GraphemeIndices}; + pub use u_str::{UnicodeStr, Words, Graphemes, GraphemeIndices}; + pub use u_str::{utf8_char_width, is_utf16, Utf16Items, Utf16Item}; + pub use u_str::{utf16_items, Utf16Encoder}; } -// this lets us use #[deriving(Clone)] +// this lets us use #[deriving(..)] mod std { pub use core::clone; pub use core::cmp; + pub use core::fmt; } diff --git a/src/libunicode/u_str.rs b/src/libunicode/u_str.rs index 5e98109c432aa..a3d4dd057d002 100644 --- a/src/libunicode/u_str.rs +++ b/src/libunicode/u_str.rs @@ -16,120 +16,41 @@ //! UnicodeChar trait. 
use self::GraphemeState::*; +use core::prelude::*; + +use core::char; use core::cmp; -use core::slice::SliceExt; -use core::iter::{Filter, AdditiveIterator, Iterator, IteratorExt}; -use core::iter::{DoubleEndedIterator, DoubleEndedIteratorExt}; -use core::kinds::Sized; -use core::option::Option; -use core::option::Option::{None, Some}; -use core::str::{CharSplits, StrPrelude}; +use core::iter::{Filter, AdditiveIterator}; +use core::mem; +use core::num::Int; +use core::slice; +use core::str::CharSplits; + use u_char::UnicodeChar; use tables::grapheme::GraphemeCat; /// An iterator over the words of a string, separated by a sequence of whitespace /// FIXME: This should be opaque -pub type Words<'a> = Filter<&'a str, CharSplits<'a, fn(char) -> bool>, fn(&&str) -> bool>; +#[stable] +pub struct Words<'a> { + inner: Filter<&'a str, CharSplits<'a, fn(char) -> bool>, fn(&&str) -> bool>, +} /// Methods for Unicode string slices -pub trait UnicodeStrPrelude for Sized? { - /// Returns an iterator over the - /// [grapheme clusters](http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries) - /// of the string. - /// - /// If `is_extended` is true, the iterator is over the *extended grapheme clusters*; - /// otherwise, the iterator is over the *legacy grapheme clusters*. - /// [UAX#29](http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries) - /// recommends extended grapheme cluster boundaries for general processing. - /// - /// # Example - /// - /// ```rust - /// let gr1 = "a\u{0310}e\u{0301}o\u{0308}\u{0332}".graphemes(true).collect::>(); - /// let b: &[_] = &["a\u{0310}", "e\u{0301}", "o\u{0308}\u{0332}"]; - /// assert_eq!(gr1.as_slice(), b); - /// let gr2 = "a\r\nb🇷🇺🇸🇹".graphemes(true).collect::>(); - /// let b: &[_] = &["a", "\r\n", "b", "🇷🇺🇸🇹"]; - /// assert_eq!(gr2.as_slice(), b); - /// ``` +#[allow(missing_docs)] // docs in libcollections +pub trait UnicodeStr for Sized? { fn graphemes<'a>(&'a self, is_extended: bool) -> Graphemes<'a>; - - /// Returns an iterator over the grapheme clusters of self and their byte offsets. - /// See `graphemes()` method for more information. - /// - /// # Example - /// - /// ```rust - /// let gr_inds = "a̐éö̲\r\n".grapheme_indices(true).collect::>(); - /// let b: &[_] = &[(0u, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")]; - /// assert_eq!(gr_inds.as_slice(), b); - /// ``` fn grapheme_indices<'a>(&'a self, is_extended: bool) -> GraphemeIndices<'a>; - - /// An iterator over the words of a string (subsequences separated - /// by any sequence of whitespace). Sequences of whitespace are - /// collapsed, so empty "words" are not included. - /// - /// # Example - /// - /// ```rust - /// let some_words = " Mary had\ta little \n\t lamb"; - /// let v: Vec<&str> = some_words.words().collect(); - /// assert_eq!(v, vec!["Mary", "had", "a", "little", "lamb"]); - /// ``` fn words<'a>(&'a self) -> Words<'a>; - - /// Returns true if the string contains only whitespace. - /// - /// Whitespace characters are determined by `char::is_whitespace`. - /// - /// # Example - /// - /// ```rust - /// assert!(" \t\n".is_whitespace()); - /// assert!("".is_whitespace()); - /// - /// assert!( !"abc".is_whitespace()); - /// ``` fn is_whitespace(&self) -> bool; - - /// Returns true if the string contains only alphanumeric code - /// points. - /// - /// Alphanumeric characters are determined by `char::is_alphanumeric`. - /// - /// # Example - /// - /// ```rust - /// assert!("Löwe老虎Léopard123".is_alphanumeric()); - /// assert!("".is_alphanumeric()); - /// - /// assert!( !" 
&*~".is_alphanumeric()); - /// ``` fn is_alphanumeric(&self) -> bool; - - /// Returns a string's displayed width in columns, treating control - /// characters as zero-width. - /// - /// `is_cjk` determines behavior for characters in the Ambiguous category: - /// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1. - /// In CJK locales, `is_cjk` should be `true`, else it should be `false`. - /// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/) - /// recommends that these characters be treated as 1 column (i.e., - /// `is_cjk` = `false`) if the locale is unknown. fn width(&self, is_cjk: bool) -> uint; - - /// Returns a string with leading and trailing whitespace removed. fn trim<'a>(&'a self) -> &'a str; - - /// Returns a string with leading whitespace removed. fn trim_left<'a>(&'a self) -> &'a str; - - /// Returns a string with trailing whitespace removed. fn trim_right<'a>(&'a self) -> &'a str; } -impl UnicodeStrPrelude for str { +impl UnicodeStr for str { #[inline] fn graphemes(&self, is_extended: bool) -> Graphemes { Graphemes { string: self, extended: is_extended, cat: None, catb: None } @@ -143,9 +64,12 @@ impl UnicodeStrPrelude for str { #[inline] fn words(&self) -> Words { fn is_not_empty(s: &&str) -> bool { !s.is_empty() } + let is_not_empty: fn(&&str) -> bool = is_not_empty; // coerce to fn pointer + fn is_whitespace(c: char) -> bool { c.is_whitespace() } + let is_whitespace: fn(char) -> bool = is_whitespace; // coerce to fn pointer - self.split(is_whitespace).filter(is_not_empty) + Words { inner: self.split(is_whitespace).filter(is_not_empty) } } #[inline] @@ -428,3 +352,195 @@ impl<'a> DoubleEndedIterator<&'a str> for Graphemes<'a> { Some(retstr) } } + +// https://tools.ietf.org/html/rfc3629 +static UTF8_CHAR_WIDTH: [u8, ..256] = [ +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x1F +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x3F +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x5F +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, +1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x7F +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0x9F +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0xBF +0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2, +2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, // 0xDF +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, // 0xEF +4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,0, // 0xFF +]; + +/// Given a first byte, determine how many bytes are in this UTF-8 character +#[inline] +pub fn utf8_char_width(b: u8) -> uint { + return UTF8_CHAR_WIDTH[b as uint] as uint; +} + +/// Determines if a vector of `u16` contains valid UTF-16 +pub fn is_utf16(v: &[u16]) -> bool { + let mut it = v.iter(); + macro_rules! next { ($ret:expr) => { + match it.next() { Some(u) => *u, None => return $ret } + } + } + loop { + let u = next!(true); + + match char::from_u32(u as u32) { + Some(_) => {} + None => { + let u2 = next!(false); + if u < 0xD7FF || u > 0xDBFF || + u2 < 0xDC00 || u2 > 0xDFFF { return false; } + } + } + } +} + +/// An iterator that decodes UTF-16 encoded codepoints from a vector +/// of `u16`s. +#[deriving(Clone)] +pub struct Utf16Items<'a> { + iter: slice::Iter<'a, u16> +} +/// The possibilities for values decoded from a `u16` stream. +#[deriving(PartialEq, Eq, Clone, Show)] +pub enum Utf16Item { + /// A valid codepoint. + ScalarValue(char), + /// An invalid surrogate without its pair. 
+ LoneSurrogate(u16) +} + +impl Copy for Utf16Item {} + +impl Utf16Item { + /// Convert `self` to a `char`, taking `LoneSurrogate`s to the + /// replacement character (U+FFFD). + #[inline] + pub fn to_char_lossy(&self) -> char { + match *self { + Utf16Item::ScalarValue(c) => c, + Utf16Item::LoneSurrogate(_) => '\u{FFFD}' + } + } +} + +impl<'a> Iterator for Utf16Items<'a> { + fn next(&mut self) -> Option { + let u = match self.iter.next() { + Some(u) => *u, + None => return None + }; + + if u < 0xD800 || 0xDFFF < u { + // not a surrogate + Some(Utf16Item::ScalarValue(unsafe {mem::transmute(u as u32)})) + } else if u >= 0xDC00 { + // a trailing surrogate + Some(Utf16Item::LoneSurrogate(u)) + } else { + // preserve state for rewinding. + let old = self.iter; + + let u2 = match self.iter.next() { + Some(u2) => *u2, + // eof + None => return Some(Utf16Item::LoneSurrogate(u)) + }; + if u2 < 0xDC00 || u2 > 0xDFFF { + // not a trailing surrogate so we're not a valid + // surrogate pair, so rewind to redecode u2 next time. + self.iter = old; + return Some(Utf16Item::LoneSurrogate(u)) + } + + // all ok, so lets decode it. + let c = ((u - 0xD800) as u32 << 10 | (u2 - 0xDC00) as u32) + 0x1_0000; + Some(Utf16Item::ScalarValue(unsafe {mem::transmute(c)})) + } + } + + #[inline] + fn size_hint(&self) -> (uint, Option) { + let (low, high) = self.iter.size_hint(); + // we could be entirely valid surrogates (2 elements per + // char), or entirely non-surrogates (1 element per char) + (low / 2, high) + } +} + +/// Create an iterator over the UTF-16 encoded codepoints in `v`, +/// returning invalid surrogates as `LoneSurrogate`s. +/// +/// # Example +/// +/// ```rust +/// use unicode::str::Utf16Item::{ScalarValue, LoneSurrogate}; +/// +/// // 𝄞music +/// let v = [0xD834, 0xDD1E, 0x006d, 0x0075, +/// 0x0073, 0xDD1E, 0x0069, 0x0063, +/// 0xD834]; +/// +/// assert_eq!(unicode::str::utf16_items(&v).collect::>(), +/// vec![ScalarValue('𝄞'), +/// ScalarValue('m'), ScalarValue('u'), ScalarValue('s'), +/// LoneSurrogate(0xDD1E), +/// ScalarValue('i'), ScalarValue('c'), +/// LoneSurrogate(0xD834)]); +/// ``` +pub fn utf16_items<'a>(v: &'a [u16]) -> Utf16Items<'a> { + Utf16Items { iter : v.iter() } +} + +/// Iterator adaptor for encoding `char`s to UTF-16. +#[deriving(Clone)] +pub struct Utf16Encoder { + chars: I, + extra: u16 +} + +impl Utf16Encoder { + /// Create an UTF-16 encoder from any `char` iterator. + pub fn new(chars: I) -> Utf16Encoder where I: Iterator { + Utf16Encoder { chars: chars, extra: 0 } + } +} + +impl Iterator for Utf16Encoder where I: Iterator { + #[inline] + fn next(&mut self) -> Option { + if self.extra != 0 { + let tmp = self.extra; + self.extra = 0; + return Some(tmp); + } + + let mut buf = [0u16, ..2]; + self.chars.next().map(|ch| { + let n = ch.encode_utf16(buf[mut]).unwrap_or(0); + if n == 2 { self.extra = buf[1]; } + buf[0] + }) + } + + #[inline] + fn size_hint(&self) -> (uint, Option) { + let (low, high) = self.chars.size_hint(); + // every char gets either one u16 or two u16, + // so this iterator is between 1 or 2 times as + // long as the underlying iterator. 
+ (low, high.and_then(|n| n.checked_mul(2))) + } +} + +impl<'a> Iterator<&'a str> for Words<'a> { + fn next(&mut self) -> Option<&'a str> { self.inner.next() } +} +impl<'a> DoubleEndedIterator<&'a str> for Words<'a> { + fn next_back(&mut self) -> Option<&'a str> { self.inner.next_back() } +} diff --git a/src/rt/arch/arm/_context.S b/src/rt/arch/arm/_context.S deleted file mode 100644 index 38fc4827f5861..0000000000000 --- a/src/rt/arch/arm/_context.S +++ /dev/null @@ -1,69 +0,0 @@ -// Mark stack as non-executable -#if defined(__linux__) && defined(__ELF__) -.section .note.GNU-stack, "", %progbits -#endif - -.text -.code 32 -.arm -#if defined(__APPLE__) -.align 2 -#else -.align -#endif - -#if defined(__APPLE__) - #define SWAP_REGISTERS _rust_swap_registers - #define BOOTSTRAP_TASK _rust_bootstrap_green_task -#else - #define SWAP_REGISTERS rust_swap_registers - #define BOOTSTRAP_TASK rust_bootstrap_green_task -#endif - -.globl SWAP_REGISTERS -SWAP_REGISTERS: - str r0, [r0, #0] - str r3, [r0, #12] - str r4, [r0, #16] - str r5, [r0, #20] - str r6, [r0, #24] - str r7, [r0, #28] - str r8, [r0, #32] - str r9, [r0, #36] - str r10, [r0, #40] - str r11, [r0, #44] - str r12, [r0, #48] - str sp, [r0, #52] - str lr, [r0, #56] - - mrs r2, cpsr - str r2, [r0, #64] - - - ldr r0, [r1, #0] - ldr r3, [r1, #12] - ldr r4, [r1, #16] - ldr r5, [r1, #20] - ldr r6, [r1, #24] - ldr r7, [r1, #28] - ldr r8, [r1, #32] - ldr r9, [r1, #36] - ldr r10, [r1, #40] - ldr r11, [r1, #44] - ldr r12, [r1, #48] - - ldr sp, [r1, #52] - ldr lr, [r1, #56] - - ldr r2, [r1, #64] - msr cpsr_cxsf, r2 - - mov pc, lr - -// For reasons of this existence, see the comments in x86_64/_context.S -.globl BOOTSTRAP_TASK -BOOTSTRAP_TASK: - mov r0, r0 - mov r1, r3 - mov r2, r4 - mov pc, r5 diff --git a/src/rt/arch/i386/_context.S b/src/rt/arch/i386/_context.S deleted file mode 100644 index 6b79a82e4acaf..0000000000000 --- a/src/rt/arch/i386/_context.S +++ /dev/null @@ -1,65 +0,0 @@ -// Mark stack as non-executable -#if defined(__linux__) && defined(__ELF__) -.section .note.GNU-stack, "", @progbits -#endif - - .text - -/* -Callee save registers: - ebp, ebx, esi, edi - -Caller save registers: - eax, ecx, edx -*/ - -/* -Saves a set of registers. This is used by our implementation of -getcontext. - -The registers_t variable is in (%esp) -*/ - -#if defined(__APPLE__) || defined(_WIN32) -#define SWAP_REGISTERS _rust_swap_registers -#else -#define SWAP_REGISTERS rust_swap_registers -#endif - -// swap_registers(registers_t *oregs, registers_t *regs) -.globl SWAP_REGISTERS -SWAP_REGISTERS: - // save the old context - movl 4(%esp), %eax - movl %ebx, 4(%eax) - movl %ebp, 16(%eax) - movl %esi, 20(%eax) - movl %edi, 24(%eax) - - // save the flags - pushf - popl %ecx - movl %ecx, 44(%eax) - - // save the return address as the instruction pointer - // and save the stack pointer of the caller - popl %ecx - movl %esp, 28(%eax) - movl %ecx, 48(%eax) - - // restore the new context - movl 4(%esp), %eax - - movl 4(%eax), %ebx - movl 16(%eax), %ebp - movl 20(%eax), %esi - movl 24(%eax), %edi - movl 28(%eax), %esp - - // restore the flags - movl 44(%eax), %ecx - push %ecx - popf - - // Return! 
- jmp *48(%eax) diff --git a/src/rt/arch/mips/_context.S b/src/rt/arch/mips/_context.S deleted file mode 100644 index cfe77cc30456b..0000000000000 --- a/src/rt/arch/mips/_context.S +++ /dev/null @@ -1,88 +0,0 @@ -// Mark stack as non-executable -#if defined(__linux__) && defined(__ELF__) -.section .note.GNU-stack, "", @progbits -#endif - -.text -.globl rust_swap_registers -.align 2 -.set nomips16 -.ent rust_swap_registers -rust_swap_registers: - .set noreorder - .set nomacro - .set noat - sw $1, 1 * 4($4) - sw $2, 2 * 4($4) - sw $3, 3 * 4($4) - sw $4, 4 * 4($4) - sw $5, 5 * 4($4) - sw $6, 6 * 4($4) - sw $7, 7 * 4($4) - - sw $8, 8 * 4($4) - sw $9, 9 * 4($4) - sw $10, 10 * 4($4) - sw $11, 11 * 4($4) - sw $12, 12 * 4($4) - sw $13, 13 * 4($4) - sw $14, 14 * 4($4) - sw $15, 15 * 4($4) - - sw $16, 16 * 4($4) - sw $17, 17 * 4($4) - sw $18, 18 * 4($4) - sw $19, 19 * 4($4) - sw $20, 20 * 4($4) - sw $21, 21 * 4($4) - sw $22, 22 * 4($4) - sw $23, 23 * 4($4) - - sw $24, 24 * 4($4) - sw $25, 25 * 4($4) - sw $26, 26 * 4($4) - sw $27, 27 * 4($4) - sw $28, 28 * 4($4) - sw $29, 29 * 4($4) - sw $30, 30 * 4($4) - sw $31, 31 * 4($4) - - lw $1, 1 * 4($5) - lw $2, 2 * 4($5) - lw $3, 3 * 4($5) - lw $4, 4 * 4($5) - lw $6, 6 * 4($5) - lw $7, 7 * 4($5) - - lw $8, 8 * 4($5) - lw $9, 9 * 4($5) - lw $10, 10 * 4($5) - lw $11, 11 * 4($5) - lw $12, 12 * 4($5) - lw $13, 13 * 4($5) - lw $14, 14 * 4($5) - lw $15, 15 * 4($5) - - lw $16, 16 * 4($5) - lw $17, 17 * 4($5) - lw $18, 18 * 4($5) - lw $19, 19 * 4($5) - lw $20, 20 * 4($5) - lw $21, 21 * 4($5) - lw $22, 22 * 4($5) - lw $23, 23 * 4($5) - - lw $24, 24 * 4($5) - lw $25, 25 * 4($5) - lw $26, 26 * 4($5) - lw $27, 27 * 4($5) - lw $28, 28 * 4($5) - lw $29, 29 * 4($5) - lw $30, 30 * 4($5) - lw $31, 31 * 4($5) - - lw $5, 5 * 4($5) - - jr $31 - nop -.end rust_swap_registers diff --git a/src/rt/arch/mipsel/_context.S b/src/rt/arch/mipsel/_context.S deleted file mode 100644 index cfe77cc30456b..0000000000000 --- a/src/rt/arch/mipsel/_context.S +++ /dev/null @@ -1,88 +0,0 @@ -// Mark stack as non-executable -#if defined(__linux__) && defined(__ELF__) -.section .note.GNU-stack, "", @progbits -#endif - -.text -.globl rust_swap_registers -.align 2 -.set nomips16 -.ent rust_swap_registers -rust_swap_registers: - .set noreorder - .set nomacro - .set noat - sw $1, 1 * 4($4) - sw $2, 2 * 4($4) - sw $3, 3 * 4($4) - sw $4, 4 * 4($4) - sw $5, 5 * 4($4) - sw $6, 6 * 4($4) - sw $7, 7 * 4($4) - - sw $8, 8 * 4($4) - sw $9, 9 * 4($4) - sw $10, 10 * 4($4) - sw $11, 11 * 4($4) - sw $12, 12 * 4($4) - sw $13, 13 * 4($4) - sw $14, 14 * 4($4) - sw $15, 15 * 4($4) - - sw $16, 16 * 4($4) - sw $17, 17 * 4($4) - sw $18, 18 * 4($4) - sw $19, 19 * 4($4) - sw $20, 20 * 4($4) - sw $21, 21 * 4($4) - sw $22, 22 * 4($4) - sw $23, 23 * 4($4) - - sw $24, 24 * 4($4) - sw $25, 25 * 4($4) - sw $26, 26 * 4($4) - sw $27, 27 * 4($4) - sw $28, 28 * 4($4) - sw $29, 29 * 4($4) - sw $30, 30 * 4($4) - sw $31, 31 * 4($4) - - lw $1, 1 * 4($5) - lw $2, 2 * 4($5) - lw $3, 3 * 4($5) - lw $4, 4 * 4($5) - lw $6, 6 * 4($5) - lw $7, 7 * 4($5) - - lw $8, 8 * 4($5) - lw $9, 9 * 4($5) - lw $10, 10 * 4($5) - lw $11, 11 * 4($5) - lw $12, 12 * 4($5) - lw $13, 13 * 4($5) - lw $14, 14 * 4($5) - lw $15, 15 * 4($5) - - lw $16, 16 * 4($5) - lw $17, 17 * 4($5) - lw $18, 18 * 4($5) - lw $19, 19 * 4($5) - lw $20, 20 * 4($5) - lw $21, 21 * 4($5) - lw $22, 22 * 4($5) - lw $23, 23 * 4($5) - - lw $24, 24 * 4($5) - lw $25, 25 * 4($5) - lw $26, 26 * 4($5) - lw $27, 27 * 4($5) - lw $28, 28 * 4($5) - lw $29, 29 * 4($5) - lw $30, 30 * 4($5) - lw $31, 31 * 
4($5) - - lw $5, 5 * 4($5) - - jr $31 - nop -.end rust_swap_registers diff --git a/src/rt/arch/x86_64/_context.S b/src/rt/arch/x86_64/_context.S deleted file mode 100644 index 36caf7720c40c..0000000000000 --- a/src/rt/arch/x86_64/_context.S +++ /dev/null @@ -1,192 +0,0 @@ -// Mark stack as non-executable -#if defined(__linux__) && defined(__ELF__) -.section .note.GNU-stack, "", @progbits -#endif - -#include "regs.h" -#define ARG0 RUSTRT_ARG0_S -#define ARG1 RUSTRT_ARG1_S - - .text - -/* -According to ABI documentation found at -http://www.x86-64.org/documentation.html -and Microsoft discussion at -http://msdn.microsoft.com/en-US/library/9z1stfyw%28v=VS.80%29.aspx. - -BOTH CALLING CONVENTIONS - -Callee save registers: - R12--R15, RDI, RSI, RBX, RBP, RSP - XMM0--XMM5 - -Caller save registers: - RAX, RCX, RDX, R8--R11 - XMM6--XMM15 - Floating point stack - -MAC/AMD CALLING CONVENTIONS - -Integer arguments go in registers: - rdi, rsi, rdx, rcx, r8, r9 - -User flags have no specified role and are not preserved - across calls, with the exception of DF in %rFLAGS, - which must be clear (set to "forward" direction) - on function entry and return. - -MICROSOFT CALLING CONVENTIONS - -Return value: RAX - -First four arguments: - RCX, RDX, R8, R9 - XMM0, XMM1, XMM2, XMM3 -*/ - -/* - Stores current registers into arg0/RCX and restores - registers found in arg1/RDX. This is used by our - implementation of getcontext. Only saves/restores nonvolatile - registers and the register used for the first argument. - Volatile registers in general ought to be saved by the caller - anyhow. -*/ - -#if defined(__APPLE__) -#define SWAP_REGISTERS _rust_swap_registers -#else -#define SWAP_REGISTERS rust_swap_registers -#endif - -// swap_registers(registers_t *oregs, registers_t *regs) -.globl SWAP_REGISTERS -SWAP_REGISTERS: - // n.b. when we enter, the return address is at the top of - // the stack (i.e., 0(%RSP)) and the argument is in - // RUSTRT_ARG0_S. We - // simply save all NV registers into oregs. - // We then restore all NV registers from regs. This restores - // the old stack pointer, which should include the proper - // return address. We can therefore just return normally to - // jump back into the old code. 
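The swap_registers comment above is the heart of the deleted green-task switching: save the caller's callee-saved state into one buffer, reload another buffer, and "return" into whatever instruction pointer that buffer holds. As a purely illustrative sketch (not part of this patch and not the real libgreen code), the Rust side of such a routine could be declared roughly as follows; the slot count of 34 is an assumption taken from the Windows RUSTRT_MAX in regs.h further down.

    // Hypothetical mirror of the register save area described above: one
    // 8-byte slot per RUSTRT_* index (XMM registers occupy two slots each).
    // A real buffer would also need 16-byte alignment for the movapd stores.
    #[repr(C)]
    pub struct Registers {
        slots: [u64; 34],
    }

    extern "C" {
        // Saves the caller's nonvolatile registers into `out`, then restores
        // the set previously saved in `new` and resumes at its saved IP.
        fn rust_swap_registers(out: *mut Registers, new: *const Registers);
    }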
- - // Save instruction pointer: - pop %rax - mov %rax, (RUSTRT_IP*8)(RUSTRT_ARG0_S) - - // Save non-volatile integer registers: - // (including RSP) - mov %rbx, (RUSTRT_RBX*8)(ARG0) - mov %rsp, (RUSTRT_RSP*8)(ARG0) - mov %rbp, (RUSTRT_RBP*8)(ARG0) - mov %r12, (RUSTRT_R12*8)(ARG0) - mov %r13, (RUSTRT_R13*8)(ARG0) - mov %r14, (RUSTRT_R14*8)(ARG0) - mov %r15, (RUSTRT_R15*8)(ARG0) - -#if defined(__MINGW32__) || defined(_WINDOWS) - mov %rdi, (RUSTRT_RDI*8)(ARG0) - mov %rsi, (RUSTRT_RSI*8)(ARG0) -#endif - - // Save 0th argument register: - mov ARG0, (RUSTRT_ARG0*8)(ARG0) - - // Save non-volatile XMM registers: -#if defined(__MINGW32__) || defined(_WINDOWS) - movapd %xmm6, (RUSTRT_XMM6*8)(ARG0) - movapd %xmm7, (RUSTRT_XMM7*8)(ARG0) - movapd %xmm8, (RUSTRT_XMM8*8)(ARG0) - movapd %xmm9, (RUSTRT_XMM9*8)(ARG0) - movapd %xmm10, (RUSTRT_XMM10*8)(ARG0) - movapd %xmm11, (RUSTRT_XMM11*8)(ARG0) - movapd %xmm12, (RUSTRT_XMM12*8)(ARG0) - movapd %xmm13, (RUSTRT_XMM13*8)(ARG0) - movapd %xmm14, (RUSTRT_XMM14*8)(ARG0) - movapd %xmm15, (RUSTRT_XMM15*8)(ARG0) -#else - movapd %xmm0, (RUSTRT_XMM0*8)(ARG0) - movapd %xmm1, (RUSTRT_XMM1*8)(ARG0) - movapd %xmm2, (RUSTRT_XMM2*8)(ARG0) - movapd %xmm3, (RUSTRT_XMM3*8)(ARG0) - movapd %xmm4, (RUSTRT_XMM4*8)(ARG0) - movapd %xmm5, (RUSTRT_XMM5*8)(ARG0) -#endif - - // Restore non-volatile integer registers: - // (including RSP) - mov (RUSTRT_RBX*8)(ARG1), %rbx - mov (RUSTRT_RSP*8)(ARG1), %rsp - mov (RUSTRT_RBP*8)(ARG1), %rbp - mov (RUSTRT_R12*8)(ARG1), %r12 - mov (RUSTRT_R13*8)(ARG1), %r13 - mov (RUSTRT_R14*8)(ARG1), %r14 - mov (RUSTRT_R15*8)(ARG1), %r15 - -#if defined(__MINGW32__) || defined(_WINDOWS) - mov (RUSTRT_RDI*8)(ARG1), %rdi - mov (RUSTRT_RSI*8)(ARG1), %rsi -#endif - - // Restore 0th argument register: - mov (RUSTRT_ARG0*8)(ARG1), ARG0 - - // Restore non-volatile XMM registers: -#if defined(__MINGW32__) || defined(_WINDOWS) - movapd (RUSTRT_XMM6*8)(ARG1), %xmm6 - movapd (RUSTRT_XMM7*8)(ARG1), %xmm7 - movapd (RUSTRT_XMM8*8)(ARG1), %xmm8 - movapd (RUSTRT_XMM9*8)(ARG1), %xmm9 - movapd (RUSTRT_XMM10*8)(ARG1), %xmm10 - movapd (RUSTRT_XMM11*8)(ARG1), %xmm11 - movapd (RUSTRT_XMM12*8)(ARG1), %xmm12 - movapd (RUSTRT_XMM13*8)(ARG1), %xmm13 - movapd (RUSTRT_XMM14*8)(ARG1), %xmm14 - movapd (RUSTRT_XMM15*8)(ARG1), %xmm15 -#else - movapd (RUSTRT_XMM0*8)(ARG1), %xmm0 - movapd (RUSTRT_XMM1*8)(ARG1), %xmm1 - movapd (RUSTRT_XMM2*8)(ARG1), %xmm2 - movapd (RUSTRT_XMM3*8)(ARG1), %xmm3 - movapd (RUSTRT_XMM4*8)(ARG1), %xmm4 - movapd (RUSTRT_XMM5*8)(ARG1), %xmm5 -#endif - - // Jump to the instruction pointer - // found in regs: - jmp *(RUSTRT_IP*8)(ARG1) - -// This function below, rust_bootstrap_green_task, is used to initialize a green -// task. This code is the very first code that is run whenever a green task -// starts. The only assumptions that this code makes is that it has a register -// context previously set up by Context::new() and some values are in some -// special registers. -// -// In theory the register context could be set up and then the context switching -// would plop us directly into some 'extern "C" fn', but not all platforms have -// the argument registers saved throughout a context switch (linux doesn't save -// rdi/rsi, the first two argument registers). Instead of modifying all context -// switches, instead the initial data for starting a green thread is shoved into -// unrelated registers (r12/13, etc) which always need to be saved on context -// switches anyway. 
-// -// With this strategy we get the benefit of being able to pass a fair bit of -// contextual data from the start of a green task to its init function, as well -// as not hindering any context switches. -// -// If you alter this code in any way, you likely need to update -// src/libgreen/context.rs as well. - -#if defined(__APPLE__) -#define BOOTSTRAP _rust_bootstrap_green_task -#else -#define BOOTSTRAP rust_bootstrap_green_task -#endif -.globl BOOTSTRAP -BOOTSTRAP: - mov %r12, RUSTRT_ARG0_S - mov %r13, RUSTRT_ARG1_S - mov %r14, RUSTRT_ARG2_S - jmpq *%r15 diff --git a/src/rt/arch/x86_64/regs.h b/src/rt/arch/x86_64/regs.h deleted file mode 100644 index 25160ca68a6b3..0000000000000 --- a/src/rt/arch/x86_64/regs.h +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// This is loosely kept in sync with src/libstd/rt/context.rs - -#define RUSTRT_RBX 0 -#define RUSTRT_RSP 1 -#define RUSTRT_RBP 2 -// RCX on Windows, RDI elsewhere -#define RUSTRT_ARG0 3 -#define RUSTRT_R12 4 -#define RUSTRT_R13 5 -#define RUSTRT_R14 6 -#define RUSTRT_R15 7 -#define RUSTRT_IP 8 -#if defined(__MINGW32__) || defined(_WINDOWS) - #define RUSTRT_RDI 9 - #define RUSTRT_RSI 10 - #define RUSTRT_ST1 11 - #define RUSTRT_ST2 12 - #define RUSTRT_XMM6 14 - #define RUSTRT_XMM7 16 - #define RUSTRT_XMM8 18 - #define RUSTRT_XMM9 20 - #define RUSTRT_XMM10 22 - #define RUSTRT_XMM11 24 - #define RUSTRT_XMM12 26 - #define RUSTRT_XMM13 28 - #define RUSTRT_XMM14 30 - #define RUSTRT_XMM15 32 - #define RUSTRT_MAX 34 -#else - // Not used, just padding - #define RUSTRT_XXX 9 - #define RUSTRT_XMM0 10 - #define RUSTRT_XMM1 12 - #define RUSTRT_XMM2 14 - #define RUSTRT_XMM3 16 - #define RUSTRT_XMM4 18 - #define RUSTRT_XMM5 20 - #define RUSTRT_MAX 22 -#endif - -// ARG0 is the register in which the first argument goes. -// Naturally this depends on your operating system. 
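Tying the bootstrap comments above to the register indices defined in regs.h below, a hypothetical seeding routine for a brand-new task context might look like the following. The function name and signature are inventions for illustration only; the indices mirror the RUSTRT_* constants, and the real libgreen setup lived in src/libgreen/context.rs.

    // Hypothetical: fill a fresh register frame so that the first swap lands
    // in rust_bootstrap_green_task, which forwards r12/r13/r14 as arguments
    // and jumps through r15 to the real entry point.
    fn seed_new_task(regs: &mut [u64; 34],
                     bootstrap: u64, entry: u64,
                     arg0: u64, arg1: u64, arg2: u64,
                     stack_top: u64) {
        const RUSTRT_RSP: usize = 1;
        const RUSTRT_R12: usize = 4;
        const RUSTRT_R13: usize = 5;
        const RUSTRT_R14: usize = 6;
        const RUSTRT_R15: usize = 7;
        const RUSTRT_IP: usize = 8;

        regs[RUSTRT_IP] = bootstrap;   // first swap "returns" into the trampoline
        regs[RUSTRT_R12] = arg0;       // becomes ARG0 in the trampoline
        regs[RUSTRT_R13] = arg1;       // becomes ARG1
        regs[RUSTRT_R14] = arg2;       // becomes ARG2
        regs[RUSTRT_R15] = entry;      // trampoline jumps here
        regs[RUSTRT_RSP] = stack_top;  // new task's stack
    }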
-#if defined(__MINGW32__) || defined(_WINDOWS) -# define RUSTRT_ARG0_S %rcx -# define RUSTRT_ARG1_S %rdx -# define RUSTRT_ARG2_S %r8 -# define RUSTRT_ARG3_S %r9 -#else -# define RUSTRT_ARG0_S %rdi -# define RUSTRT_ARG1_S %rsi -# define RUSTRT_ARG2_S %rdx -# define RUSTRT_ARG3_S %rcx -# define RUSTRT_ARG4_S %r8 -# define RUSTRT_ARG5_S %r9 -#endif diff --git a/src/snapshots.txt b/src/snapshots.txt index 071d9f758eb1a..c3cdf4acba150 100644 --- a/src/snapshots.txt +++ b/src/snapshots.txt @@ -1,3 +1,12 @@ +S 2014-12-20 8443b09 + freebsd-x86_64 004f54dce86faeebc15abf92c8742634b53987e6 + linux-i386 3daf531aed03f5769402f2fef852377e2838db98 + linux-x86_64 4f3c8b092dd4fe159d6f25a217cf62e0e899b365 + macos-i386 2a3e647b9c400505bd49cfe56091e866c83574ca + macos-x86_64 5e730efc34d79a33f464a87686c10eace0760a2e + winnt-i386 8ea056043de82096d5ce5abc98c8c74ebac7e77d + winnt-x86_64 9804100dafae9b64a76e0ea7e1be157719dae151 + S 2014-12-15 1b97cd3 freebsd-x86_64 a5d7ff81ec04e01e64dc201c7aa2d875ebd0cbbb linux-i386 47e13c2f1d26a0d13e593e0881a80ca103aa7b2e diff --git a/src/test/auxiliary/nested_item.rs b/src/test/auxiliary/nested_item.rs index 96bae65639073..d97a2e3cda129 100644 --- a/src/test/auxiliary/nested_item.rs +++ b/src/test/auxiliary/nested_item.rs @@ -28,7 +28,7 @@ impl Foo { pub struct Parser; impl> Parser { fn in_doctype(&mut self) { - static DOCTYPEPattern: [char, ..6] = ['O', 'C', 'T', 'Y', 'P', 'E']; + static DOCTYPEPattern: [char; 6] = ['O', 'C', 'T', 'Y', 'P', 'E']; } } diff --git a/src/test/bench/noise.rs b/src/test/bench/noise.rs index 025f8467d2067..75cf864ce4961 100644 --- a/src/test/bench/noise.rs +++ b/src/test/bench/noise.rs @@ -37,20 +37,20 @@ fn gradient(orig: Vec2, grad: Vec2, p: Vec2) -> f32 { } struct Noise2DContext { - rgradients: [Vec2, ..256], - permutations: [i32, ..256], + rgradients: [Vec2; 256], + permutations: [i32; 256], } impl Noise2DContext { fn new() -> Noise2DContext { let mut rng = StdRng::new().unwrap(); - let mut rgradients = [Vec2 { x: 0.0, y: 0.0 }, ..256]; + let mut rgradients = [Vec2 { x: 0.0, y: 0.0 }; 256]; for x in rgradients.iter_mut() { *x = random_gradient(&mut rng); } - let mut permutations = [0i32, ..256]; + let mut permutations = [0i32; 256]; for (i, x) in permutations.iter_mut().enumerate() { *x = i as i32; } @@ -65,7 +65,7 @@ impl Noise2DContext { self.rgradients[(idx & 255) as uint] } - fn get_gradients(&self, x: f32, y: f32) -> ([Vec2, ..4], [Vec2, ..4]) { + fn get_gradients(&self, x: f32, y: f32) -> ([Vec2; 4], [Vec2; 4]) { let x0f = x.floor(); let y0f = y.floor(); let x1f = x0f + 1.0; @@ -102,7 +102,7 @@ impl Noise2DContext { fn main() { let symbols = [' ', '░', '▒', '▓', '█', '█']; - let mut pixels = [0f32, ..256*256]; + let mut pixels = [0f32; 256*256]; let n2d = Noise2DContext::new(); for _ in range(0u, 100) { diff --git a/src/test/bench/shootout-fannkuch-redux.rs b/src/test/bench/shootout-fannkuch-redux.rs index 4849421a3f01e..723b2b722d7e1 100644 --- a/src/test/bench/shootout-fannkuch-redux.rs +++ b/src/test/bench/shootout-fannkuch-redux.rs @@ -64,14 +64,14 @@ fn next_permutation(perm: &mut [i32], count: &mut [i32]) { } struct P { - p: [i32, .. 16], + p: [i32; 16], } impl Copy for P {} struct Perm { - cnt: [i32, .. 16], - fact: [u32, .. 16], + cnt: [i32; 16], + fact: [u32; 16], n: u32, permcount: u32, perm: P, @@ -81,21 +81,21 @@ impl Copy for Perm {} impl Perm { fn new(n: u32) -> Perm { - let mut fact = [1, .. 16]; + let mut fact = [1; 16]; for i in range(1, n as uint + 1) { fact[i] = fact[i - 1] * i as u32; } Perm { - cnt: [0, .. 
16], + cnt: [0; 16], fact: fact, n: n, permcount: 0, - perm: P { p: [0, .. 16 ] } + perm: P { p: [0; 16 ] } } } fn get(&mut self, mut idx: i32) -> P { - let mut pp = [0u8, .. 16]; + let mut pp = [0u8; 16]; self.permcount = idx as u32; for (i, place) in self.perm.p.iter_mut().enumerate() { *place = i as i32 + 1; diff --git a/src/test/bench/shootout-fasta-redux.rs b/src/test/bench/shootout-fasta-redux.rs index afffbe5bed4e4..eb18cfdaed3ad 100644 --- a/src/test/bench/shootout-fasta-redux.rs +++ b/src/test/bench/shootout-fasta-redux.rs @@ -64,7 +64,7 @@ const ALU: &'static str = "GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTG\ const NULL_AMINO_ACID: AminoAcid = AminoAcid { c: ' ' as u8, p: 0.0 }; -static IUB: [AminoAcid, ..15] = [ +static IUB: [AminoAcid;15] = [ AminoAcid { c: 'a' as u8, p: 0.27 }, AminoAcid { c: 'c' as u8, p: 0.12 }, AminoAcid { c: 'g' as u8, p: 0.12 }, @@ -82,7 +82,7 @@ static IUB: [AminoAcid, ..15] = [ AminoAcid { c: 'Y' as u8, p: 0.02 }, ]; -static HOMO_SAPIENS: [AminoAcid, ..4] = [ +static HOMO_SAPIENS: [AminoAcid;4] = [ AminoAcid { c: 'a' as u8, p: 0.3029549426680 }, AminoAcid { c: 'c' as u8, p: 0.1979883004921 }, AminoAcid { c: 'g' as u8, p: 0.1975473066391 }, @@ -148,8 +148,8 @@ impl<'a, W: Writer> RepeatFasta<'a, W> { } } -fn make_lookup(a: &[AminoAcid]) -> [AminoAcid, ..LOOKUP_SIZE] { - let mut lookup = [ NULL_AMINO_ACID, ..LOOKUP_SIZE ]; +fn make_lookup(a: &[AminoAcid]) -> [AminoAcid;LOOKUP_SIZE] { + let mut lookup = [ NULL_AMINO_ACID;LOOKUP_SIZE ]; let mut j = 0; for (i, slot) in lookup.iter_mut().enumerate() { while a[j].p < (i as f32) { @@ -162,7 +162,7 @@ fn make_lookup(a: &[AminoAcid]) -> [AminoAcid, ..LOOKUP_SIZE] { struct RandomFasta<'a, W:'a> { seed: u32, - lookup: [AminoAcid, ..LOOKUP_SIZE], + lookup: [AminoAcid;LOOKUP_SIZE], out: &'a mut W, } @@ -193,7 +193,7 @@ impl<'a, W: Writer> RandomFasta<'a, W> { fn make(&mut self, n: uint) -> IoResult<()> { let lines = n / LINE_LEN; let chars_left = n % LINE_LEN; - let mut buf = [0, ..LINE_LEN + 1]; + let mut buf = [0;LINE_LEN + 1]; for _ in range(0, lines) { for i in range(0u, LINE_LEN) { diff --git a/src/test/bench/shootout-fasta.rs b/src/test/bench/shootout-fasta.rs index 1f0bed055211d..2de61cf3572c9 100644 --- a/src/test/bench/shootout-fasta.rs +++ b/src/test/bench/shootout-fasta.rs @@ -89,7 +89,7 @@ fn make_fasta>( -> std::io::IoResult<()> { try!(wr.write(header.as_bytes())); - let mut line = [0u8, .. 
LINE_LENGTH + 1]; + let mut line = [0u8; LINE_LENGTH + 1]; while n > 0 { let nb = min(LINE_LENGTH, n); for i in range(0, nb) { diff --git a/src/test/bench/shootout-k-nucleotide.rs b/src/test/bench/shootout-k-nucleotide.rs index d112fe60674da..6aa6b02857fbe 100644 --- a/src/test/bench/shootout-k-nucleotide.rs +++ b/src/test/bench/shootout-k-nucleotide.rs @@ -46,10 +46,10 @@ use std::string::String; use std::slice; use std::sync::{Arc, Future}; -static TABLE: [u8, ..4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ]; +static TABLE: [u8;4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ]; static TABLE_SIZE: uint = 2 << 16; -static OCCURRENCES: [&'static str, ..5] = [ +static OCCURRENCES: [&'static str;5] = [ "GGT", "GGTA", "GGTATT", @@ -130,7 +130,7 @@ struct Table { struct Items<'a> { cur: Option<&'a Entry>, - items: slice::Items<'a, Option>>, + items: slice::Iter<'a, Option>>, } impl Table { diff --git a/src/test/bench/shootout-nbody.rs b/src/test/bench/shootout-nbody.rs index 3f36c16aff63f..dab67331120a9 100644 --- a/src/test/bench/shootout-nbody.rs +++ b/src/test/bench/shootout-nbody.rs @@ -45,7 +45,7 @@ const SOLAR_MASS: f64 = 4.0 * PI * PI; const YEAR: f64 = 365.24; const N_BODIES: uint = 5; -static BODIES: [Planet, ..N_BODIES] = [ +static BODIES: [Planet;N_BODIES] = [ // Sun Planet { x: 0.0, y: 0.0, z: 0.0, @@ -102,7 +102,7 @@ struct Planet { impl Copy for Planet {} -fn advance(bodies: &mut [Planet, ..N_BODIES], dt: f64, steps: int) { +fn advance(bodies: &mut [Planet;N_BODIES], dt: f64, steps: int) { for _ in range(0, steps) { let mut b_slice = bodies.as_mut_slice(); loop { @@ -135,7 +135,7 @@ fn advance(bodies: &mut [Planet, ..N_BODIES], dt: f64, steps: int) { } } -fn energy(bodies: &[Planet, ..N_BODIES]) -> f64 { +fn energy(bodies: &[Planet;N_BODIES]) -> f64 { let mut e = 0.0; let mut bodies = bodies.iter(); loop { @@ -155,7 +155,7 @@ fn energy(bodies: &[Planet, ..N_BODIES]) -> f64 { e } -fn offset_momentum(bodies: &mut [Planet, ..N_BODIES]) { +fn offset_momentum(bodies: &mut [Planet;N_BODIES]) { let mut px = 0.0; let mut py = 0.0; let mut pz = 0.0; diff --git a/src/test/bench/shootout-reverse-complement.rs b/src/test/bench/shootout-reverse-complement.rs index 312ee2dd27e4d..d746ec1dbabd5 100644 --- a/src/test/bench/shootout-reverse-complement.rs +++ b/src/test/bench/shootout-reverse-complement.rs @@ -50,17 +50,17 @@ use std::ptr::{copy_memory}; use std::io::{IoResult, EndOfFile}; struct Tables { - table8: [u8, ..1 << 8], - table16: [u16, ..1 << 16] + table8: [u8;1 << 8], + table16: [u16;1 << 16] } impl Tables { fn new() -> Tables { - let mut table8 = [0, ..1 << 8]; + let mut table8 = [0;1 << 8]; for (i, v) in table8.iter_mut().enumerate() { *v = Tables::computed_cpl8(i as u8); } - let mut table16 = [0, ..1 << 16]; + let mut table16 = [0;1 << 16]; for (i, v) in table16.iter_mut().enumerate() { *v = table8[i & 255] as u16 << 8 | table8[i >> 8] as u16; diff --git a/src/test/bench/sudoku.rs b/src/test/bench/sudoku.rs index c55f85f40e8b6..5fb7e2c3a8498 100644 --- a/src/test/bench/sudoku.rs +++ b/src/test/bench/sudoku.rs @@ -46,7 +46,7 @@ impl Sudoku { return Sudoku { grid: g } } - pub fn from_vec(vec: &[[u8, ..9], ..9]) -> Sudoku { + pub fn from_vec(vec: &[[u8;9];9]) -> Sudoku { let g = Vec::from_fn(9u, |i| { Vec::from_fn(9u, |j| { vec[i][j] }) }); @@ -198,7 +198,7 @@ impl Colors { } } -static DEFAULT_SUDOKU: [[u8, ..9], ..9] = [ +static DEFAULT_SUDOKU: [[u8;9];9] = [ /* 0 1 2 3 4 5 6 7 8 */ /* 0 */ [0u8, 4u8, 0u8, 6u8, 0u8, 0u8, 0u8, 3u8, 2u8], /* 1 */ [0u8, 0u8, 8u8, 0u8, 2u8, 0u8, 
0u8, 0u8, 0u8], @@ -212,7 +212,7 @@ static DEFAULT_SUDOKU: [[u8, ..9], ..9] = [ ]; #[cfg(test)] -static DEFAULT_SOLUTION: [[u8, ..9], ..9] = [ +static DEFAULT_SOLUTION: [[u8;9];9] = [ /* 0 1 2 3 4 5 6 7 8 */ /* 0 */ [1u8, 4u8, 9u8, 6u8, 7u8, 5u8, 8u8, 3u8, 2u8], /* 1 */ [5u8, 3u8, 8u8, 1u8, 2u8, 9u8, 7u8, 4u8, 6u8], diff --git a/src/test/compile-fail/better-expected.rs b/src/test/compile-fail/better-expected.rs index 489f892726a3b..2e0f2a174c606 100644 --- a/src/test/compile-fail/better-expected.rs +++ b/src/test/compile-fail/better-expected.rs @@ -9,5 +9,5 @@ // except according to those terms. fn main() { - let x: [int ..3]; //~ ERROR expected one of `(`, `+`, `,`, `::`, or `]`, found `..` + let x: [int 3]; //~ ERROR expected one of `(`, `+`, `,`, `::`, `;`, or `]`, found `3` } diff --git a/src/test/compile-fail/borrowck-autoref-3261.rs b/src/test/compile-fail/borrowck-autoref-3261.rs index 8c6e76e774619..1b4e5891f941d 100644 --- a/src/test/compile-fail/borrowck-autoref-3261.rs +++ b/src/test/compile-fail/borrowck-autoref-3261.rs @@ -20,7 +20,7 @@ impl X { } fn main() { - let mut x = X(Either::Right(main)); + let mut x = X(Either::Right(main as fn())); (&mut x).with( |opt| { //~ ERROR cannot borrow `x` as mutable more than once at a time match opt { diff --git a/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs b/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs index 93a4383b4f59f..f0d42bb9ac115 100644 --- a/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs +++ b/src/test/compile-fail/borrowck-for-loop-correct-cmt-for-pattern.rs @@ -11,7 +11,7 @@ // Issue #16205. struct Foo { - a: [Box, ..3], + a: [Box; 3], } fn main() { diff --git a/src/test/compile-fail/cast-to-bare-fn.rs b/src/test/compile-fail/cast-to-bare-fn.rs index 10a829fd79455..1db813292b012 100644 --- a/src/test/compile-fail/cast-to-bare-fn.rs +++ b/src/test/compile-fail/cast-to-bare-fn.rs @@ -13,7 +13,7 @@ fn foo(_x: int) { } fn main() { let v: u64 = 5; let x = foo as extern "C" fn() -> int; - //~^ ERROR non-scalar cast + //~^ ERROR mismatched types let y = v as extern "Rust" fn(int) -> (int, int); //~^ ERROR non-scalar cast y(x()); diff --git a/src/test/compile-fail/coerce-bare-fn-to-closure-and-proc.rs b/src/test/compile-fail/coerce-bare-fn-to-closure-and-proc.rs index 27e339180a6cf..52f4c4749e224 100644 --- a/src/test/compile-fail/coerce-bare-fn-to-closure-and-proc.rs +++ b/src/test/compile-fail/coerce-bare-fn-to-closure-and-proc.rs @@ -8,12 +8,21 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// Test that coercions from fn item types are ok, but not fn pointer +// types to closures/procs are not allowed. + fn foo() {} -fn main() { +fn fn_item_type() { let f = foo; let f_closure: || = f; - //~^ ERROR: cannot coerce non-statically resolved bare fn to closure - //~^^ HELP: consider embedding the function in a closure } + +fn fn_pointer_type() { + let f = foo as fn(); + let f_closure: || = f; + //~^ ERROR: mismatched types +} + +fn main() { } diff --git a/src/test/compile-fail/coercion-slice.rs b/src/test/compile-fail/coercion-slice.rs index bb020688f5884..b6b46fadb13da 100644 --- a/src/test/compile-fail/coercion-slice.rs +++ b/src/test/compile-fail/coercion-slice.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
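Several of the test updates above (the `main as fn()` and `foo as fn()` casts, and the new fn-item-type test further down) hinge on the same change: every fn item now has its own distinct type, and only a coercion or an explicit cast turns it into a plain fn pointer. A minimal illustration, independent of the patch:

    fn double(x: i32) -> i32 { x * 2 }
    fn quadruple(x: i32) -> i32 { x * 4 }

    fn main() {
        // `double` and `quadruple` have distinct fn item types; they only
        // unify once converted to a common fn pointer type.
        let f: fn(i32) -> i32 = double;          // implicit coercion
        let g = quadruple as fn(i32) -> i32;     // explicit cast
        assert_eq!(f(3) + g(3), 18);
    }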
-// Tests that we forbid coercion from `[T, ..n]` to `&[T]` +// Tests that we forbid coercion from `[T; n]` to `&[T]` fn main() { - let _: &[int] = [0i]; //~ERROR: mismatched types: expected `&[int]`, found `[int, ..1]` + let _: &[int] = [0i]; //~ERROR: mismatched types: expected `&[int]`, found `[int; 1]` } diff --git a/src/test/compile-fail/const-cast-wrong-type.rs b/src/test/compile-fail/const-cast-wrong-type.rs index 223426dc7c689..b359744183418 100644 --- a/src/test/compile-fail/const-cast-wrong-type.rs +++ b/src/test/compile-fail/const-cast-wrong-type.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -static a: [u8, ..3] = ['h' as u8, 'i' as u8, 0 as u8]; +static a: [u8; 3] = ['h' as u8, 'i' as u8, 0 as u8]; static b: *const i8 = &a as *const i8; //~ ERROR mismatched types fn main() { diff --git a/src/test/compile-fail/dst-bad-coerce1.rs b/src/test/compile-fail/dst-bad-coerce1.rs index 59499ac070d6d..c77ae25e0cf51 100644 --- a/src/test/compile-fail/dst-bad-coerce1.rs +++ b/src/test/compile-fail/dst-bad-coerce1.rs @@ -20,9 +20,9 @@ trait Bar {} pub fn main() { // With a vec of ints. let f1 = Fat { ptr: [1, 2, 3] }; - let f2: &Fat<[int, ..3]> = &f1; + let f2: &Fat<[int; 3]> = &f1; let f3: &Fat<[uint]> = f2; - //~^ ERROR mismatched types: expected `&Fat<[uint]>`, found `&Fat<[int, ..3]>` + //~^ ERROR mismatched types: expected `&Fat<[uint]>`, found `&Fat<[int; 3]>` // With a trait. let f1 = Fat { ptr: Foo }; diff --git a/src/test/compile-fail/dst-bad-coerce2.rs b/src/test/compile-fail/dst-bad-coerce2.rs index e1a754b633208..6eb650e978117 100644 --- a/src/test/compile-fail/dst-bad-coerce2.rs +++ b/src/test/compile-fail/dst-bad-coerce2.rs @@ -21,7 +21,7 @@ impl Bar for Foo {} pub fn main() { // With a vec of ints. let f1 = Fat { ptr: [1, 2, 3] }; - let f2: &Fat<[int, ..3]> = &f1; + let f2: &Fat<[int; 3]> = &f1; let f3: &mut Fat<[int]> = f2; //~ ERROR mismatched types // With a trait. diff --git a/src/test/compile-fail/dst-bad-coerce3.rs b/src/test/compile-fail/dst-bad-coerce3.rs index 7cf647a26d7a7..b0bd517637464 100644 --- a/src/test/compile-fail/dst-bad-coerce3.rs +++ b/src/test/compile-fail/dst-bad-coerce3.rs @@ -21,7 +21,7 @@ impl Bar for Foo {} fn baz<'a>() { // With a vec of ints. let f1 = Fat { ptr: [1, 2, 3] }; - let f2: &Fat<[int, ..3]> = &f1; //~ ERROR `f1` does not live long enough + let f2: &Fat<[int; 3]> = &f1; //~ ERROR `f1` does not live long enough let f3: &'a Fat<[int]> = f2; // With a trait. diff --git a/src/test/compile-fail/dst-bad-coerce4.rs b/src/test/compile-fail/dst-bad-coerce4.rs index 9010185f76b6a..783a32d63028a 100644 --- a/src/test/compile-fail/dst-bad-coerce4.rs +++ b/src/test/compile-fail/dst-bad-coerce4.rs @@ -17,6 +17,6 @@ struct Fat { pub fn main() { // With a vec of ints. 
let f1: &Fat<[int]> = &Fat { ptr: [1, 2, 3] }; - let f2: &Fat<[int, ..3]> = f1; - //~^ ERROR mismatched types: expected `&Fat<[int, ..3]>`, found `&Fat<[int]>` + let f2: &Fat<[int; 3]> = f1; + //~^ ERROR mismatched types: expected `&Fat<[int; 3]>`, found `&Fat<[int]>` } diff --git a/src/test/compile-fail/dst-bad-deep.rs b/src/test/compile-fail/dst-bad-deep.rs index 506322d41f531..0833a74f1daf9 100644 --- a/src/test/compile-fail/dst-bad-deep.rs +++ b/src/test/compile-fail/dst-bad-deep.rs @@ -18,7 +18,7 @@ struct Fat { } pub fn main() { - let f: Fat<[int, ..3]> = Fat { ptr: [5i, 6, 7] }; + let f: Fat<[int; 3]> = Fat { ptr: [5i, 6, 7] }; let g: &Fat<[int]> = &f; let h: &Fat> = &Fat { ptr: *g }; //~^ ERROR the trait `core::kinds::Sized` is not implemented diff --git a/src/test/compile-fail/fn-item-type.rs b/src/test/compile-fail/fn-item-type.rs new file mode 100644 index 0000000000000..dd4a24bfb2fdc --- /dev/null +++ b/src/test/compile-fail/fn-item-type.rs @@ -0,0 +1,25 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test that the types of distinct fn items are not compatible by +// default. See also `run-pass/fn-item-type-*.rs`. + +fn foo(x: int) -> int { x * 2 } +fn bar(x: int) -> int { x * 4 } + +fn eq(x: T, y: T) { } + +fn main() { + let f = if true { foo } else { bar }; + //~^ ERROR expected fn item, found a different fn item + + eq(foo, bar); + //~^ ERROR expected fn item, found a different fn item +} diff --git a/src/test/compile-fail/huge-array-simple.rs b/src/test/compile-fail/huge-array-simple.rs index 17f85c7bd2b8d..a9dda771b7ff8 100644 --- a/src/test/compile-fail/huge-array-simple.rs +++ b/src/test/compile-fail/huge-array-simple.rs @@ -11,5 +11,5 @@ // error-pattern: too big for the current fn main() { - let fat : [u8, ..(1<<61)+(1<<31)] = [0, ..(1u64<<61) as uint +(1u64<<31) as uint]; + let fat : [u8; (1<<61)+(1<<31)] = [0; (1u64<<61) as uint +(1u64<<31) as uint]; } diff --git a/src/test/compile-fail/huge-array.rs b/src/test/compile-fail/huge-array.rs index 4b91564154b6a..029e9651cb3cd 100644 --- a/src/test/compile-fail/huge-array.rs +++ b/src/test/compile-fail/huge-array.rs @@ -8,13 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
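The bulk of the remaining churn in this section is the mechanical syntax migration for fixed-size arrays: the type `[T, ..N]` becomes `[T; N]`, and the repeat expression `[x, ..N]` becomes `[x; N]`. In the new notation:

    static TABLE: [u8; 4] = [1, 2, 3, 4];    // length follows a semicolon

    fn main() {
        let zeros = [0u8; 16];               // repeat expression: value; count
        assert_eq!(TABLE.len() + zeros.len(), 20);
    }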
-// error-pattern: ..1518599999 +// error-pattern:; 1518599999 fn generic(t: T) { - let s: [T, ..1518600000] = [t, ..1518600000]; + let s: [T; 1518600000] = [t; 1518600000]; } fn main() { - let x: [u8, ..1518599999] = [0, ..1518599999]; - generic::<[u8, ..1518599999]>(x); + let x: [u8; 1518599999] = [0; 1518599999]; + generic::<[u8; 1518599999]>(x); } diff --git a/src/test/compile-fail/huge-enum.rs b/src/test/compile-fail/huge-enum.rs index 4a85cb5753b7f..7c7a75abf3fc7 100644 --- a/src/test/compile-fail/huge-enum.rs +++ b/src/test/compile-fail/huge-enum.rs @@ -14,10 +14,10 @@ #[cfg(target_word_size = "32")] fn main() { - let big: Option<[u32, ..(1<<29)-1]> = None; + let big: Option<[u32; (1<<29)-1]> = None; } #[cfg(target_word_size = "64")] fn main() { - let big: Option<[u32, ..(1<<45)-1]> = None; + let big: Option<[u32; (1<<45)-1]> = None; } diff --git a/src/test/compile-fail/issue-10764.rs b/src/test/compile-fail/issue-10764.rs index 0733744b65210..cd4ec495556c5 100644 --- a/src/test/compile-fail/issue-10764.rs +++ b/src/test/compile-fail/issue-10764.rs @@ -12,4 +12,4 @@ fn f(_: extern "Rust" fn()) {} extern fn bar() {} fn main() { f(bar) } -//~^ ERROR: expected `fn()`, found `extern "C" fn()` +//~^ ERROR mismatched types diff --git a/src/test/compile-fail/issue-13359.rs b/src/test/compile-fail/issue-13359.rs index 227ed3fb83420..5c72c7388a9ee 100644 --- a/src/test/compile-fail/issue-13359.rs +++ b/src/test/compile-fail/issue-13359.rs @@ -14,8 +14,8 @@ fn bar(_s: u32) { } fn main() { foo(1*(1 as int)); - //~^ ERROR: mismatched types: expected `i16`, found `int` (expected `i16`, found `int`) + //~^ ERROR: mismatched types: expected `i16`, found `int` (expected i16, found int) bar(1*(1 as uint)); - //~^ ERROR: mismatched types: expected `u32`, found `uint` (expected `u32`, found `uint`) + //~^ ERROR: mismatched types: expected `u32`, found `uint` (expected u32, found uint) } diff --git a/src/test/compile-fail/issue-13446.rs b/src/test/compile-fail/issue-13446.rs index 162324b7c59b2..a0a7660428d41 100644 --- a/src/test/compile-fail/issue-13446.rs +++ b/src/test/compile-fail/issue-13446.rs @@ -13,7 +13,7 @@ // error-pattern: mismatched types -static VEC: [u32, ..256] = vec!(); +static VEC: [u32; 256] = vec!(); fn main() {} diff --git a/src/test/compile-fail/issue-13482-2.rs b/src/test/compile-fail/issue-13482-2.rs index 4ec8c2b1b7ea5..ef7d3d4d158d9 100644 --- a/src/test/compile-fail/issue-13482-2.rs +++ b/src/test/compile-fail/issue-13482-2.rs @@ -14,7 +14,7 @@ fn main() { let x = [1,2]; let y = match x { [] => None, - //~^ ERROR types: expected `[_#0i, ..2]`, found `[_#7t, ..0]` + //~^ ERROR types: expected `[_#0i; 2]`, found `[_#7t; 0]` // (expected array of 2 elements, found array of 0 elements) [a,_] => Some(a) }; diff --git a/src/test/compile-fail/issue-13482.rs b/src/test/compile-fail/issue-13482.rs index 18070ed53b04a..157280b1719ad 100644 --- a/src/test/compile-fail/issue-13482.rs +++ b/src/test/compile-fail/issue-13482.rs @@ -12,7 +12,7 @@ fn main() { let x = [1,2]; let y = match x { [] => None, -//~^ ERROR types: expected `[_, ..2]`, found `[_, ..0]` +//~^ ERROR types: expected `[_; 2]`, found `[_; 0]` // (expected array of 2 elements, found array of 0 elements) [a,_] => Some(a) }; diff --git a/src/test/compile-fail/issue-14845.rs b/src/test/compile-fail/issue-14845.rs index bc606d8139f01..5166d84a02543 100644 --- a/src/test/compile-fail/issue-14845.rs +++ b/src/test/compile-fail/issue-14845.rs @@ -10,15 +10,15 @@ struct X { - a: [u8, ..1] + a: [u8; 1] } fn main() { let x = X { a: 
[0] }; let _f = &x.a as *mut u8; - //~^ ERROR mismatched types: expected `*mut u8`, found `&[u8, ..1]` + //~^ ERROR mismatched types: expected `*mut u8`, found `&[u8; 1]` let local = [0u8]; let _v = &local as *mut u8; - //~^ ERROR mismatched types: expected `*mut u8`, found `&[u8, ..1]` + //~^ ERROR mismatched types: expected `*mut u8`, found `&[u8; 1]` } diff --git a/src/test/compile-fail/issue-17252.rs b/src/test/compile-fail/issue-17252.rs index 4a6b80d765b71..4adb3f041a3f4 100644 --- a/src/test/compile-fail/issue-17252.rs +++ b/src/test/compile-fail/issue-17252.rs @@ -11,10 +11,10 @@ static FOO: uint = FOO; //~ ERROR recursive constant fn main() { - let _x: [u8, ..FOO]; // caused stack overflow prior to fix + let _x: [u8; FOO]; // caused stack overflow prior to fix let _y: uint = 1 + { static BAR: uint = BAR; //~ ERROR recursive constant - let _z: [u8, ..BAR]; // caused stack overflow prior to fix + let _z: [u8; BAR]; // caused stack overflow prior to fix 1 }; } diff --git a/src/test/compile-fail/issue-17441.rs b/src/test/compile-fail/issue-17441.rs index 11c815da1c7f5..e5da5c5504ef3 100644 --- a/src/test/compile-fail/issue-17441.rs +++ b/src/test/compile-fail/issue-17441.rs @@ -10,7 +10,7 @@ fn main() { let _foo = &[1u, 2] as [uint]; - //~^ ERROR cast to unsized type: `&[uint, ..2]` as `[uint]` + //~^ ERROR cast to unsized type: `&[uint; 2]` as `[uint]` //~^^ HELP consider using an implicit coercion to `&[uint]` instead let _bar = box 1u as std::fmt::Show; //~^ ERROR cast to unsized type: `Box` as `core::fmt::Show` @@ -19,6 +19,6 @@ fn main() { //~^ ERROR cast to unsized type: `uint` as `core::fmt::Show` //~^^ HELP consider using a box or reference as appropriate let _quux = [1u, 2] as [uint]; - //~^ ERROR cast to unsized type: `[uint, ..2]` as `[uint]` + //~^ ERROR cast to unsized type: `[uint; 2]` as `[uint]` //~^^ HELP consider using a box or reference as appropriate } diff --git a/src/test/compile-fail/issue-17718-borrow-interior.rs b/src/test/compile-fail/issue-17718-borrow-interior.rs index 1f763dbdc9fa6..8aa5fdf1c4d21 100644 --- a/src/test/compile-fail/issue-17718-borrow-interior.rs +++ b/src/test/compile-fail/issue-17718-borrow-interior.rs @@ -15,7 +15,7 @@ static B: &'static uint = &A.a; static C: &'static uint = &(A.a); //~^ ERROR: cannot refer to the interior of another static -static D: [uint, ..1] = [1]; +static D: [uint; 1] = [1]; static E: uint = D[0]; //~^ ERROR: cannot refer to other statics by value static F: &'static uint = &D[0]; diff --git a/src/test/compile-fail/issue-19244-1.rs b/src/test/compile-fail/issue-19244-1.rs index 7ca83f21305f1..fafe6377397a3 100644 --- a/src/test/compile-fail/issue-19244-1.rs +++ b/src/test/compile-fail/issue-19244-1.rs @@ -11,6 +11,6 @@ const TUP: (uint,) = (42,); fn main() { - let a: [int, ..TUP.1]; + let a: [int; TUP.1]; //~^ ERROR expected constant expr for array length: tuple index out of bounds } diff --git a/src/test/compile-fail/issue-19244-2.rs b/src/test/compile-fail/issue-19244-2.rs index d9aeecc02222c..95965ca35f944 100644 --- a/src/test/compile-fail/issue-19244-2.rs +++ b/src/test/compile-fail/issue-19244-2.rs @@ -12,6 +12,6 @@ struct MyStruct { field: uint } const STRUCT: MyStruct = MyStruct { field: 42 }; fn main() { - let a: [int, ..STRUCT.nonexistent_field]; + let a: [int; STRUCT.nonexistent_field]; //~^ ERROR expected constant expr for array length: nonexistent struct field } diff --git a/src/test/compile-fail/issue-19922.rs b/src/test/compile-fail/issue-19922.rs new file mode 100644 index 
0000000000000..e3ced3028098b --- /dev/null +++ b/src/test/compile-fail/issue-19922.rs @@ -0,0 +1,18 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +enum Homura { + Akemi { madoka: () } +} + +fn main() { + let homura = Homura::Akemi { kaname: () }; + //~^ ERROR struct variant `Homura::Akemi` has no field named `kaname` +} diff --git a/src/test/compile-fail/issue-19991.rs b/src/test/compile-fail/issue-19991.rs new file mode 100644 index 0000000000000..0f1dbfa349277 --- /dev/null +++ b/src/test/compile-fail/issue-19991.rs @@ -0,0 +1,18 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test if the sugared if-let construct correctly prints "missing an else clause" when an else +// clause does not exist, instead of the unsympathetic "match arms have incompatible types" + +fn main() { + if let Some(homura) = Some("madoka") { //~ ERROR missing an else clause: expected `()` + 765i32 + }; +} diff --git a/src/test/compile-fail/issue-2149.rs b/src/test/compile-fail/issue-2149.rs index 1150f40db762f..3343e92252f8e 100644 --- a/src/test/compile-fail/issue-2149.rs +++ b/src/test/compile-fail/issue-2149.rs @@ -22,5 +22,5 @@ impl vec_monad for Vec { } fn main() { ["hi"].bind(|x| [x] ); - //~^ ERROR type `[&str, ..1]` does not implement any method in scope named `bind` + //~^ ERROR type `[&str; 1]` does not implement any method in scope named `bind` } diff --git a/src/test/compile-fail/issue-4517.rs b/src/test/compile-fail/issue-4517.rs index f61ed35fca359..1c5fd9be1bd4d 100644 --- a/src/test/compile-fail/issue-4517.rs +++ b/src/test/compile-fail/issue-4517.rs @@ -11,8 +11,8 @@ fn bar(int_param: int) {} fn main() { - let foo: [u8, ..4] = [1u8, ..4u]; + let foo: [u8; 4] = [1u8; 4u]; bar(foo); - //~^ ERROR mismatched types: expected `int`, found `[u8, ..4]` + //~^ ERROR mismatched types: expected `int`, found `[u8; 4]` // (expected int, found vector) } diff --git a/src/test/compile-fail/issue-9575.rs b/src/test/compile-fail/issue-9575.rs index aa3d9d9fef080..6e8f7ffb68da4 100644 --- a/src/test/compile-fail/issue-9575.rs +++ b/src/test/compile-fail/issue-9575.rs @@ -10,6 +10,6 @@ #[start] fn start(argc: int, argv: *const *const u8, crate_map: *const u8) -> int { - //~^ ERROR start function expects type: `fn(int, *const *const u8) -> int` + //~^ ERROR incorrect number of function parameters 0 } diff --git a/src/test/compile-fail/issue-9957.rs b/src/test/compile-fail/issue-9957.rs index a90a1ac1a75f0..573d847cbe3b8 100644 --- a/src/test/compile-fail/issue-9957.rs +++ b/src/test/compile-fail/issue-9957.rs @@ -11,5 +11,5 @@ pub extern crate core; //~ ERROR: `pub` visibility is not allowed fn main() { - pub use std::bool; //~ ERROR: imports in functions are never reachable + pub use std::uint; //~ ERROR: imports in functions are never reachable } diff --git a/src/test/compile-fail/lint-uppercase-variables.rs b/src/test/compile-fail/lint-uppercase-variables.rs index eb5c475e7ef4f..19373c806f14d 
100644 --- a/src/test/compile-fail/lint-uppercase-variables.rs +++ b/src/test/compile-fail/lint-uppercase-variables.rs @@ -29,7 +29,7 @@ fn main() { println!("{}", Test); let mut f = File::open(&Path::new("something.txt")); - let mut buff = [0u8, ..16]; + let mut buff = [0u8; 16]; match f.read(&mut buff) { Ok(cnt) => println!("read this many bytes: {}", cnt), Err(IoError{ kind: EndOfFile, .. }) => println!("Got end of file: {}", EndOfFile.to_string()), diff --git a/src/test/compile-fail/move-fragments-9.rs b/src/test/compile-fail/move-fragments-9.rs index ce05087f65979..0b095ff6f820d 100644 --- a/src/test/compile-fail/move-fragments-9.rs +++ b/src/test/compile-fail/move-fragments-9.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// Test moving array structures, e.g. `[T, ..3]` as well as moving +// Test moving array structures, e.g. `[T; 3]` as well as moving // elements in and out of such arrays. // // Note also that the `test_move_array_then_overwrite` tests represent @@ -18,14 +18,14 @@ pub struct D { d: int } impl Drop for D { fn drop(&mut self) { } } #[rustc_move_fragments] -pub fn test_move_array_via_return(a: [D, ..3]) -> [D, ..3] { +pub fn test_move_array_via_return(a: [D; 3]) -> [D; 3] { //~^ ERROR assigned_leaf_path: `$(local a)` //~| ERROR moved_leaf_path: `$(local a)` return a; } #[rustc_move_fragments] -pub fn test_move_array_into_recv(a: [D, ..3], recv: &mut [D, ..3]) { +pub fn test_move_array_into_recv(a: [D; 3], recv: &mut [D; 3]) { //~^ ERROR parent_of_fragments: `$(local recv)` //~| ERROR assigned_leaf_path: `$(local a)` //~| ERROR moved_leaf_path: `$(local a)` @@ -34,7 +34,7 @@ pub fn test_move_array_into_recv(a: [D, ..3], recv: &mut [D, ..3]) { } #[rustc_move_fragments] -pub fn test_extract_array_elem(a: [D, ..3], i: uint) -> D { +pub fn test_extract_array_elem(a: [D; 3], i: uint) -> D { //~^ ERROR parent_of_fragments: `$(local a)` //~| ERROR assigned_leaf_path: `$(local i)` //~| ERROR moved_leaf_path: `$(local a).[]` @@ -43,7 +43,7 @@ pub fn test_extract_array_elem(a: [D, ..3], i: uint) -> D { } #[rustc_move_fragments] -pub fn test_overwrite_array_elem(mut a: [D, ..3], i: uint, d: D) { +pub fn test_overwrite_array_elem(mut a: [D; 3], i: uint, d: D) { //~^ ERROR parent_of_fragments: `$(local mut a)` //~| ERROR assigned_leaf_path: `$(local i)` //~| ERROR assigned_leaf_path: `$(local d)` @@ -59,7 +59,7 @@ pub fn test_overwrite_array_elem(mut a: [D, ..3], i: uint, d: D) { // See RFC PR 320 for more discussion. 
#[rustc_move_fragments] -pub fn test_move_array_then_overwrite_elem1(mut a: [D, ..3], i: uint, recv: &mut [D, ..3], d: D) { +pub fn test_move_array_then_overwrite_elem1(mut a: [D; 3], i: uint, recv: &mut [D; 3], d: D) { //~^ ERROR parent_of_fragments: `$(local mut a)` //~| ERROR parent_of_fragments: `$(local recv)` //~| ERROR assigned_leaf_path: `$(local recv).*` @@ -76,8 +76,8 @@ pub fn test_move_array_then_overwrite_elem1(mut a: [D, ..3], i: uint, recv: &mut } #[rustc_move_fragments] -pub fn test_move_array_then_overwrite_elem2(mut a: [D, ..3], i: uint, j: uint, - recv: &mut [D, ..3], d1: D, d2: D) { +pub fn test_move_array_then_overwrite_elem2(mut a: [D; 3], i: uint, j: uint, + recv: &mut [D; 3], d1: D, d2: D) { //~^^ ERROR parent_of_fragments: `$(local mut a)` //~| ERROR parent_of_fragments: `$(local recv)` //~| ERROR assigned_leaf_path: `$(local recv).*` diff --git a/src/test/compile-fail/moves-based-on-type-exprs.rs b/src/test/compile-fail/moves-based-on-type-exprs.rs index 678808f166cde..d8d84e558a947 100644 --- a/src/test/compile-fail/moves-based-on-type-exprs.rs +++ b/src/test/compile-fail/moves-based-on-type-exprs.rs @@ -89,7 +89,7 @@ fn f100() { fn f110() { let x = vec!("hi".to_string()); - let _y = [x.into_iter().next().unwrap(), ..1]; + let _y = [x.into_iter().next().unwrap(); 1]; touch(&x); //~ ERROR use of moved value: `x` } diff --git a/src/test/compile-fail/non-constant-enum-for-vec-repeat.rs b/src/test/compile-fail/non-constant-enum-for-vec-repeat.rs index 3ccce591ee728..a1dc2ab2041a5 100644 --- a/src/test/compile-fail/non-constant-enum-for-vec-repeat.rs +++ b/src/test/compile-fail/non-constant-enum-for-vec-repeat.rs @@ -11,6 +11,6 @@ enum State { ST_NULL, ST_WHITESPACE } fn main() { - [State::ST_NULL, ..(State::ST_WHITESPACE as uint)]; + [State::ST_NULL; (State::ST_WHITESPACE as uint)]; //~^ ERROR expected constant integer for repeat count, found non-constant expression } diff --git a/src/test/compile-fail/non-constant-expr-for-fixed-len-vec.rs b/src/test/compile-fail/non-constant-expr-for-fixed-len-vec.rs index 91551941c0656..85d734ddaf2b9 100644 --- a/src/test/compile-fail/non-constant-expr-for-fixed-len-vec.rs +++ b/src/test/compile-fail/non-constant-expr-for-fixed-len-vec.rs @@ -12,7 +12,7 @@ fn main() { fn bar(n: int) { - let _x: [int, ..n]; + let _x: [int; n]; //~^ ERROR expected constant expr for array length: non-constant path in constant expr } } diff --git a/src/test/compile-fail/non-constant-expr-for-vec-repeat.rs b/src/test/compile-fail/non-constant-expr-for-vec-repeat.rs index 299e9d3dced3c..2e063e5237c44 100644 --- a/src/test/compile-fail/non-constant-expr-for-vec-repeat.rs +++ b/src/test/compile-fail/non-constant-expr-for-vec-repeat.rs @@ -12,6 +12,6 @@ fn main() { fn bar(n: uint) { - let _x = [0, ..n]; //~ ERROR expected constant integer for repeat count, found variable + let _x = [0; n]; //~ ERROR expected constant integer for repeat count, found variable } } diff --git a/src/test/compile-fail/non-exhaustive-pattern-witness.rs b/src/test/compile-fail/non-exhaustive-pattern-witness.rs index 6e1c3db10140f..d35e3ad3c55b0 100644 --- a/src/test/compile-fail/non-exhaustive-pattern-witness.rs +++ b/src/test/compile-fail/non-exhaustive-pattern-witness.rs @@ -12,7 +12,7 @@ struct Foo { first: bool, - second: Option<[uint, ..4]> + second: Option<[uint; 4]> } enum Color { diff --git a/src/test/compile-fail/packed-struct-generic-transmute.rs b/src/test/compile-fail/packed-struct-generic-transmute.rs index d699f69864e38..5c0aba42b9683 100644 --- 
a/src/test/compile-fail/packed-struct-generic-transmute.rs +++ b/src/test/compile-fail/packed-struct-generic-transmute.rs @@ -33,7 +33,7 @@ struct Oof { fn main() { let foo = Foo { bar: [1u8, 2, 3, 4, 5], baz: 10i32 }; unsafe { - let oof: Oof<[u8, .. 5], i32> = mem::transmute(foo); + let oof: Oof<[u8; 5], i32> = mem::transmute(foo); println!("{} {}", oof.rab[], oof.zab); } } diff --git a/src/test/compile-fail/region-lifetime-bounds-on-fns-where-clause.rs b/src/test/compile-fail/region-lifetime-bounds-on-fns-where-clause.rs new file mode 100644 index 0000000000000..3e6a95b04f743 --- /dev/null +++ b/src/test/compile-fail/region-lifetime-bounds-on-fns-where-clause.rs @@ -0,0 +1,39 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn a<'a, 'b>(x: &mut &'a int, y: &mut &'b int) where 'b: 'a { + // Note: this is legal because of the `'b:'a` declaration. + *x = *y; +} + +fn b<'a, 'b>(x: &mut &'a int, y: &mut &'b int) { + // Illegal now because there is no `'b:'a` declaration. + *x = *y; //~ ERROR cannot infer +} + +fn c<'a,'b>(x: &mut &'a int, y: &mut &'b int) { + // Here we try to call `foo` but do not know that `'a` and `'b` are + // related as required. + a(x, y); //~ ERROR cannot infer +} + +fn d() { + // 'a and 'b are early bound in the function `a` because they appear + // inconstraints: + let _: fn(&mut &int, &mut &int) = a; //~ ERROR mismatched types +} + +fn e() { + // 'a and 'b are late bound in the function `b` because there are + // no constraints: + let _: fn(&mut &int, &mut &int) = b; +} + +fn main() { } diff --git a/src/test/compile-fail/region-multiple-lifetime-bounds-on-fns-where-clause.rs b/src/test/compile-fail/region-multiple-lifetime-bounds-on-fns-where-clause.rs new file mode 100644 index 0000000000000..2d635e9fc2718 --- /dev/null +++ b/src/test/compile-fail/region-multiple-lifetime-bounds-on-fns-where-clause.rs @@ -0,0 +1,41 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn a<'a, 'b, 'c>(x: &mut &'a int, y: &mut &'b int, z: &mut &'c int) where 'b: 'a + 'c { + // Note: this is legal because of the `'b:'a` declaration. + *x = *y; + *z = *y; +} + +fn b<'a, 'b, 'c>(x: &mut &'a int, y: &mut &'b int, z: &mut &'c int) { + // Illegal now because there is no `'b:'a` declaration. + *x = *y; //~ ERROR cannot infer + *z = *y; //~ ERROR cannot infer +} + +fn c<'a,'b, 'c>(x: &mut &'a int, y: &mut &'b int, z: &mut &'c int) { + // Here we try to call `foo` but do not know that `'a` and `'b` are + // related as required. 
+ a(x, y, z); //~ ERROR cannot infer +} + +fn d() { + // 'a and 'b are early bound in the function `a` because they appear + // inconstraints: + let _: fn(&mut &int, &mut &int, &mut &int) = a; //~ ERROR mismatched types +} + +fn e() { + // 'a and 'b are late bound in the function `b` because there are + // no constraints: + let _: fn(&mut &int, &mut &int, &mut &int) = b; +} + +fn main() { } diff --git a/src/test/compile-fail/regions-lifetime-bounds-on-fns.rs b/src/test/compile-fail/regions-lifetime-bounds-on-fns.rs index 773d6e2c70365..4a42728da6f58 100644 --- a/src/test/compile-fail/regions-lifetime-bounds-on-fns.rs +++ b/src/test/compile-fail/regions-lifetime-bounds-on-fns.rs @@ -15,7 +15,7 @@ fn a<'a, 'b:'a>(x: &mut &'a int, y: &mut &'b int) { fn b<'a, 'b>(x: &mut &'a int, y: &mut &'b int) { // Illegal now because there is no `'b:'a` declaration. - *x = *y; //~ ERROR mismatched types + *x = *y; //~ ERROR cannot infer } fn c<'a,'b>(x: &mut &'a int, y: &mut &'b int) { diff --git a/src/test/compile-fail/regions-nested-fns.rs b/src/test/compile-fail/regions-nested-fns.rs index cf0b615bb01ab..f4654367970d1 100644 --- a/src/test/compile-fail/regions-nested-fns.rs +++ b/src/test/compile-fail/regions-nested-fns.rs @@ -12,10 +12,10 @@ fn ignore(t: T) {} fn nested<'x>(x: &'x int) { let y = 3; - let mut ay = &y; //~ ERROR cannot infer + let mut ay = &y; ignore::< for<'z>|&'z int|>(|z| { - ay = x; + ay = x; //~ ERROR cannot infer ay = &y; ay = z; }); diff --git a/src/test/compile-fail/removed-syntax-fixed-vec.rs b/src/test/compile-fail/removed-syntax-fixed-vec.rs index fe49d1f4a8d85..0a8420c19c33f 100644 --- a/src/test/compile-fail/removed-syntax-fixed-vec.rs +++ b/src/test/compile-fail/removed-syntax-fixed-vec.rs @@ -8,4 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
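The new region-lifetime-bounds tests above exercise lifetime bounds written in where clauses: `where 'b: 'a` reads "'b outlives 'a", which is exactly what makes storing a `&'b` value behind a `&'a` slot legal. A small sketch of the success case (illustrative only, not part of the patch):

    fn replace_with_longer<'a, 'b>(dst: &mut &'a i32, src: &'b i32) where 'b: 'a {
        // Sound only because 'b is declared to outlive 'a.
        *dst = src;
    }

    fn main() {
        let short_lived = 1;
        let mut r: &i32 = &short_lived;
        replace_with_longer(&mut r, &2);   // `&2` is a promoted 'static borrow
        assert_eq!(*r, 2);
    }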
-type v = [int * 3]; //~ ERROR expected one of `(`, `+`, `,`, `::`, or `]`, found `*` +type v = [int * 3]; //~ ERROR expected one of `(`, `+`, `,`, `::`, `;`, or `]`, found `*` diff --git a/src/test/compile-fail/removed-syntax-mut-vec-expr.rs b/src/test/compile-fail/removed-syntax-mut-vec-expr.rs index 437f871f8eabd..30302bbd16ef8 100644 --- a/src/test/compile-fail/removed-syntax-mut-vec-expr.rs +++ b/src/test/compile-fail/removed-syntax-mut-vec-expr.rs @@ -11,5 +11,5 @@ fn f() { let v = [mut 1, 2, 3, 4]; //~^ ERROR expected identifier, found keyword `mut` - //~^^ ERROR expected one of `!`, `,`, `.`, `::`, `]`, `{`, or an operator, found `1` + //~^^ ERROR expected one of `!`, `,`, `.`, `::`, `;`, `]`, `{`, or an operator, found `1` } diff --git a/src/test/compile-fail/removed-syntax-mut-vec-ty.rs b/src/test/compile-fail/removed-syntax-mut-vec-ty.rs index af469fadf986d..9c6056bd72a1c 100644 --- a/src/test/compile-fail/removed-syntax-mut-vec-ty.rs +++ b/src/test/compile-fail/removed-syntax-mut-vec-ty.rs @@ -10,4 +10,4 @@ type v = [mut int]; //~^ ERROR expected identifier, found keyword `mut` - //~^^ ERROR expected one of `(`, `+`, `,`, `::`, or `]`, found `int` + //~^^ ERROR expected one of `(`, `+`, `,`, `::`, `;`, or `]`, found `int` diff --git a/src/test/compile-fail/repeat-to-run-dtor-twice.rs b/src/test/compile-fail/repeat-to-run-dtor-twice.rs index 8fdf586b3d1de..d3126cf44d1b1 100644 --- a/src/test/compile-fail/repeat-to-run-dtor-twice.rs +++ b/src/test/compile-fail/repeat-to-run-dtor-twice.rs @@ -24,6 +24,6 @@ impl Drop for Foo { fn main() { let a = Foo { x: 3 }; - let _ = [ a, ..5 ]; + let _ = [ a; 5 ]; //~^ ERROR the trait `core::kinds::Copy` is not implemented for the type `Foo` } diff --git a/src/test/compile-fail/repeat_count.rs b/src/test/compile-fail/repeat_count.rs index 38fbb426fb198..3b0ef0c293af7 100644 --- a/src/test/compile-fail/repeat_count.rs +++ b/src/test/compile-fail/repeat_count.rs @@ -12,18 +12,18 @@ fn main() { let n = 1; - let a = [0, ..n]; //~ ERROR expected constant integer for repeat count, found variable - let b = [0, ..()]; + let a = [0; n]; //~ ERROR expected constant integer for repeat count, found variable + let b = [0; ()]; //~^ ERROR expected constant integer for repeat count, found non-constant expression //~^^ ERROR: expected `uint`, found `()` - let c = [0, ..true]; //~ ERROR expected positive integer for repeat count, found boolean + let c = [0; true]; //~ ERROR expected positive integer for repeat count, found boolean //~^ ERROR: expected `uint`, found `bool` - let d = [0, ..0.5]; //~ ERROR expected positive integer for repeat count, found float + let d = [0; 0.5]; //~ ERROR expected positive integer for repeat count, found float //~^ ERROR: expected `uint`, found `_` - let e = [0, .."foo"]; //~ ERROR expected positive integer for repeat count, found string + let e = [0; "foo"]; //~ ERROR expected positive integer for repeat count, found string //~^ ERROR: expected `uint`, found `&'static str` - let f = [0, ..-4]; + let f = [0; -4]; //~^ ERROR expected positive integer for repeat count, found negative integer - let f = [0u, ..-1]; + let f = [0u; -1]; //~^ ERROR expected positive integer for repeat count, found negative integer } diff --git a/src/test/compile-fail/resolve-conflict-type-vs-import.rs b/src/test/compile-fail/resolve-conflict-type-vs-import.rs index fa072fa62ab7b..de934286a7cba 100644 --- a/src/test/compile-fail/resolve-conflict-type-vs-import.rs +++ b/src/test/compile-fail/resolve-conflict-type-vs-import.rs @@ -8,10 +8,10 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -use std::slice::Items; -//~^ ERROR import `Items` conflicts with type in this module +use std::slice::Iter; +//~^ ERROR import `Iter` conflicts with type in this module -struct Items; +struct Iter; fn main() { } diff --git a/src/test/compile-fail/static-reference-to-fn-1.rs b/src/test/compile-fail/static-reference-to-fn-1.rs index c0d430908a137..bce397c47932f 100644 --- a/src/test/compile-fail/static-reference-to-fn-1.rs +++ b/src/test/compile-fail/static-reference-to-fn-1.rs @@ -24,7 +24,7 @@ fn foo() -> Option { fn create() -> A<'static> { A { - func: &foo, //~ ERROR borrowed value does not live long enough + func: &foo, //~ ERROR mismatched types } } diff --git a/src/test/compile-fail/static-reference-to-fn-2.rs b/src/test/compile-fail/static-reference-to-fn-2.rs index 3a0f0a193cfe1..d7255c3ba0694 100644 --- a/src/test/compile-fail/static-reference-to-fn-2.rs +++ b/src/test/compile-fail/static-reference-to-fn-2.rs @@ -9,9 +9,11 @@ // except according to those terms. struct StateMachineIter<'a> { - statefn: &'a fn(&mut StateMachineIter<'a>) -> Option<&'static str> + statefn: &'a StateMachineFunc<'a> } +type StateMachineFunc<'a> = fn(&mut StateMachineIter<'a>) -> Option<&'static str>; + impl<'a> Iterator<&'static str> for StateMachineIter<'a> { fn next(&mut self) -> Option<&'static str> { return (*self.statefn)(self); @@ -19,19 +21,19 @@ impl<'a> Iterator<&'static str> for StateMachineIter<'a> { } fn state1(self_: &mut StateMachineIter) -> Option<&'static str> { - self_.statefn = &state2; + self_.statefn = &(state2 as StateMachineFunc); //~^ ERROR borrowed value does not live long enough return Some("state1"); } fn state2(self_: &mut StateMachineIter) -> Option<(&'static str)> { - self_.statefn = &state3; + self_.statefn = &(state3 as StateMachineFunc); //~^ ERROR borrowed value does not live long enough return Some("state2"); } fn state3(self_: &mut StateMachineIter) -> Option<(&'static str)> { - self_.statefn = &finished; + self_.statefn = &(finished as StateMachineFunc); //~^ ERROR borrowed value does not live long enough return Some("state3"); } @@ -42,7 +44,7 @@ fn finished(_: &mut StateMachineIter) -> Option<(&'static str)> { fn state_iter() -> StateMachineIter<'static> { StateMachineIter { - statefn: &state1 //~ ERROR borrowed value does not live long enough + statefn: &(state1 as StateMachineFunc) //~ ERROR borrowed value does not live long enough } } diff --git a/src/test/compile-fail/static-vec-repeat-not-constant.rs b/src/test/compile-fail/static-vec-repeat-not-constant.rs index 03be2cc8f0f02..ff84ed5bf0cd4 100644 --- a/src/test/compile-fail/static-vec-repeat-not-constant.rs +++ b/src/test/compile-fail/static-vec-repeat-not-constant.rs @@ -10,7 +10,7 @@ fn foo() -> int { 23 } -static a: [int, ..2] = [foo(), ..2]; +static a: [int; 2] = [foo(); 2]; //~^ ERROR: function calls in constants are limited to struct and enum constructors fn main() {} diff --git a/src/test/compile-fail/transmute-type-parameters.rs b/src/test/compile-fail/transmute-type-parameters.rs index 53391a0e8947b..2286c0e75bd9d 100644 --- a/src/test/compile-fail/transmute-type-parameters.rs +++ b/src/test/compile-fail/transmute-type-parameters.rs @@ -20,7 +20,7 @@ unsafe fn g(x: (T, int)) { let _: int = transmute(x); //~ ERROR cannot transmute } -unsafe fn h(x: [T, ..10]) { +unsafe fn h(x: [T; 10]) { let _: int = transmute(x); //~ ERROR cannot transmute } diff --git a/src/test/compile-fail/vector-cast-weirdness.rs 
b/src/test/compile-fail/vector-cast-weirdness.rs index e096e5eb43629..c5109ce473e9e 100644 --- a/src/test/compile-fail/vector-cast-weirdness.rs +++ b/src/test/compile-fail/vector-cast-weirdness.rs @@ -12,20 +12,20 @@ // presence of the `_` type shorthand notation. struct X { - y: [u8, ..2], + y: [u8; 2], } fn main() { let x1 = X { y: [0, 0] }; let p1: *const u8 = &x1.y as *const _; //~ ERROR mismatched types - let t1: *const [u8, ..2] = &x1.y as *const _; - let h1: *const [u8, ..2] = &x1.y as *const [u8, ..2]; + let t1: *const [u8; 2] = &x1.y as *const _; + let h1: *const [u8; 2] = &x1.y as *const [u8; 2]; let mut x1 = X { y: [0, 0] }; let p1: *mut u8 = &mut x1.y as *mut _; //~ ERROR mismatched types - let t1: *mut [u8, ..2] = &mut x1.y as *mut _; - let h1: *mut [u8, ..2] = &mut x1.y as *mut [u8, ..2]; + let t1: *mut [u8; 2] = &mut x1.y as *mut _; + let h1: *mut [u8; 2] = &mut x1.y as *mut [u8; 2]; } diff --git a/src/test/compile-fail/visible-private-types-generics.rs b/src/test/compile-fail/visible-private-types-generics.rs index 740848e93cbea..7ff18f8e0886c 100644 --- a/src/test/compile-fail/visible-private-types-generics.rs +++ b/src/test/compile-fail/visible-private-types-generics.rs @@ -10,17 +10,56 @@ trait Foo {} -pub fn f() {} //~ ERROR private type in exported type +pub fn f< + T + : Foo //~ ERROR private trait in exported type parameter bound +>() {} -pub fn g() where T: Foo {} //~ ERROR private type in exported type +pub fn g() where + T + : Foo //~ ERROR private trait in exported type parameter bound +{} -pub struct H { //~ ERROR private type in exported type - x: T, +pub struct S; + +impl S { + pub fn f< + T + : Foo //~ ERROR private trait in exported type parameter bound + >() {} + + pub fn g() where + T + : Foo //~ ERROR private trait in exported type parameter bound + {} } -pub struct I where T: Foo { //~ ERROR private type in exported type - x: T, +pub struct S1< + T + : Foo //~ ERROR private trait in exported type parameter bound +> { + x: T } -fn main() {} +pub struct S2 where + T + : Foo //~ ERROR private trait in exported type parameter bound +{ + x: T +} +pub enum E1< + T + : Foo //~ ERROR private trait in exported type parameter bound +> { + V1(T) +} + +pub enum E2 where + T + : Foo //~ ERROR private trait in exported type parameter bound +{ + V2(T) +} + +fn main() {} diff --git a/src/test/compile-fail/visible-private-types-supertrait.rs b/src/test/compile-fail/visible-private-types-supertrait.rs index c4457aaf1e1f3..dc6d446154ac7 100644 --- a/src/test/compile-fail/visible-private-types-supertrait.rs +++ b/src/test/compile-fail/visible-private-types-supertrait.rs @@ -10,7 +10,6 @@ trait Foo {} -pub trait Bar : Foo {} //~ ERROR private type in exported type +pub trait Bar : Foo {} //~ ERROR private trait in exported type fn main() {} - diff --git a/src/test/compile-fail/where-clause-constraints-are-local-for-inherent-impl.rs b/src/test/compile-fail/where-clause-constraints-are-local-for-inherent-impl.rs new file mode 100644 index 0000000000000..8d72e260a18f7 --- /dev/null +++ b/src/test/compile-fail/where-clause-constraints-are-local-for-inherent-impl.rs @@ -0,0 +1,28 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
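The two new where-clause-constraints-are-local tests below only show the failing half of the feature. The successful half, namely that a bound attached to a single method is usable inside that method without constraining the rest of the impl, looks roughly like this (names are illustrative):

    fn require_copy<T: Copy>(_x: T) {}

    struct Wrapper<T> { value: T }

    impl<T> Wrapper<T> {
        // The `T: Copy` bound applies to this method only, not to Wrapper itself.
        fn dup(&self) -> (T, T) where T: Copy {
            require_copy(self.value);
            (self.value, self.value)
        }
    }

    fn main() {
        let w = Wrapper { value: 7u8 };
        let (a, b) = w.dup();
        assert_eq!(a + b, 14);
    }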
+ +fn require_copy(x: T) {} + +struct Foo { x: T } + +// Ensure constraints are only attached to methods locally +impl Foo { + fn needs_copy(self) where T: Copy { + require_copy(self.x); + + } + + fn fails_copy(self) { + require_copy(self.x); + //~^ ERROR the trait `core::kinds::Copy` is not implemented for the type `T` + } +} + +fn main() {} diff --git a/src/test/compile-fail/where-clause-constraints-are-local-for-trait-impl.rs b/src/test/compile-fail/where-clause-constraints-are-local-for-trait-impl.rs new file mode 100644 index 0000000000000..096b53a1ea624 --- /dev/null +++ b/src/test/compile-fail/where-clause-constraints-are-local-for-trait-impl.rs @@ -0,0 +1,33 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn require_copy(x: T) {} + +struct Bar { x: T } + +trait Foo { + fn needs_copy(self) where T: Copy; + fn fails_copy(self); +} + +// Ensure constraints are only attached to methods locally +impl Foo for Bar { + fn needs_copy(self) where T: Copy { + require_copy(self.x); + + } + + fn fails_copy(self) { + require_copy(self.x); + //~^ ERROR the trait `core::kinds::Copy` is not implemented for the type `T` + } +} + +fn main() {} diff --git a/src/test/compile-fail/where-clause-method-substituion.rs b/src/test/compile-fail/where-clause-method-substituion.rs new file mode 100644 index 0000000000000..40d2df45488f5 --- /dev/null +++ b/src/test/compile-fail/where-clause-method-substituion.rs @@ -0,0 +1,30 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait Foo {} + +trait Bar { + fn method(&self) where A: Foo; +} + +struct S; +struct X; + +// Remove this impl causing the below resolution to fail // impl Foo for X {} + +impl Bar for int { + fn method(&self) where X: Foo { + } +} + +fn main() { + 1.method::(); + //~^ ERROR the trait `Foo` is not implemented for the type `X` +} diff --git a/src/test/compile-fail/where-clauses-method-unsatisfied.rs b/src/test/compile-fail/where-clauses-method-unsatisfied.rs new file mode 100644 index 0000000000000..e5b54582e4e3f --- /dev/null +++ b/src/test/compile-fail/where-clauses-method-unsatisfied.rs @@ -0,0 +1,30 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test that a where clause attached to a method allows us to add +// additional constraints to a parameter out of scope. 
+ +struct Foo { + value: T +} + +struct Bar; // does not implement Eq + +impl Foo { + fn equals(&self, u: &Foo) -> bool where T : Eq { + self.value == u.value + } +} + +fn main() { + let x = Foo { value: Bar }; + x.equals(&x); + //~^ ERROR the trait `core::cmp::Eq` is not implemented for the type `Bar` +} diff --git a/src/test/compile-fail/where-clauses-not-parameter.rs b/src/test/compile-fail/where-clauses-not-parameter.rs index 2817aa16e8e5e..9e81703787f3e 100644 --- a/src/test/compile-fail/where-clauses-not-parameter.rs +++ b/src/test/compile-fail/where-clauses-not-parameter.rs @@ -8,10 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -fn equal(_: &T, _: &T) -> bool where int : Eq { - //~^ ERROR undeclared type parameter +struct A; + +trait U {} + +// impl U for A {} + +fn equal(_: &T, _: &T) -> bool where A : U { + true } fn main() { + equal(&0i, &0i); + //~^ ERROR the trait `U` is not implemented for the type `A` } - diff --git a/src/test/debuginfo/destructured-for-loop-variable.rs b/src/test/debuginfo/destructured-for-loop-variable.rs new file mode 100644 index 0000000000000..19a82ee5e67b3 --- /dev/null +++ b/src/test/debuginfo/destructured-for-loop-variable.rs @@ -0,0 +1,178 @@ +// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-android: FIXME(#10381) +// min-lldb-version: 310 + +// compile-flags:-g + +// === GDB TESTS =================================================================================== + +// gdb-command:run + +// DESTRUCTURED STRUCT +// gdb-command:print x +// gdb-check:$1 = 400 +// gdb-command:print y +// gdb-check:$2 = 401.5 +// gdb-command:print z +// gdb-check:$3 = true +// gdb-command:continue + +// DESTRUCTURED TUPLE +// gdb-command:print/x _i8 +// gdb-check:$4 = 0x6f +// gdb-command:print/x _u8 +// gdb-check:$5 = 0x70 +// gdb-command:print _i16 +// gdb-check:$6 = -113 +// gdb-command:print _u16 +// gdb-check:$7 = 114 +// gdb-command:print _i32 +// gdb-check:$8 = -115 +// gdb-command:print _u32 +// gdb-check:$9 = 116 +// gdb-command:print _i64 +// gdb-check:$10 = -117 +// gdb-command:print _u64 +// gdb-check:$11 = 118 +// gdb-command:print _f32 +// gdb-check:$12 = 119.5 +// gdb-command:print _f64 +// gdb-check:$13 = 120.5 +// gdb-command:continue + +// MORE COMPLEX CASE +// gdb-command:print v1 +// gdb-check:$14 = 80000 +// gdb-command:print x1 +// gdb-check:$15 = 8000 +// gdb-command:print *y1 +// gdb-check:$16 = 80001.5 +// gdb-command:print z1 +// gdb-check:$17 = false +// gdb-command:print *x2 +// gdb-check:$18 = -30000 +// gdb-command:print y2 +// gdb-check:$19 = -300001.5 +// gdb-command:print *z2 +// gdb-check:$20 = true +// gdb-command:print v2 +// gdb-check:$21 = 854237.5 +// gdb-command:continue + + +// === LLDB TESTS ================================================================================== + +// lldb-command:type format add --format hex char +// lldb-command:type format add --format hex 'unsigned char' + +// lldb-command:run + +// DESTRUCTURED STRUCT +// lldb-command:print x +// lldb-check:[...]$0 = 400 +// lldb-command:print y +// lldb-check:[...]$1 = 401.5 +// lldb-command:print z +// lldb-check:[...]$2 = true +// lldb-command:continue + +// DESTRUCTURED TUPLE 
+// lldb-command:print _i8 +// lldb-check:[...]$3 = 0x6f +// lldb-command:print _u8 +// lldb-check:[...]$4 = 0x70 +// lldb-command:print _i16 +// lldb-check:[...]$5 = -113 +// lldb-command:print _u16 +// lldb-check:[...]$6 = 114 +// lldb-command:print _i32 +// lldb-check:[...]$7 = -115 +// lldb-command:print _u32 +// lldb-check:[...]$8 = 116 +// lldb-command:print _i64 +// lldb-check:[...]$9 = -117 +// lldb-command:print _u64 +// lldb-check:[...]$10 = 118 +// lldb-command:print _f32 +// lldb-check:[...]$11 = 119.5 +// lldb-command:print _f64 +// lldb-check:[...]$12 = 120.5 +// lldb-command:continue + +// MORE COMPLEX CASE +// lldb-command:print v1 +// lldb-check:[...]$13 = 80000 +// lldb-command:print x1 +// lldb-check:[...]$14 = 8000 +// lldb-command:print *y1 +// lldb-check:[...]$15 = 80001.5 +// lldb-command:print z1 +// lldb-check:[...]$16 = false +// lldb-command:print *x2 +// lldb-check:[...]$17 = -30000 +// lldb-command:print y2 +// lldb-check:[...]$18 = -300001.5 +// lldb-command:print *z2 +// lldb-check:[...]$19 = true +// lldb-command:print v2 +// lldb-check:[...]$20 = 854237.5 +// lldb-command:continue + + +struct Struct { + x: i16, + y: f32, + z: bool +} + +fn main() { + + let s = Struct { + x: 400, + y: 401.5, + z: true + }; + + for &Struct { x, y, z } in [s].iter() { + zzz(); // #break + } + + let tuple: (i8, u8, i16, u16, i32, u32, i64, u64, f32, f64) = + (0x6f, 0x70, -113, 114, -115, 116, -117, 118, 119.5, 120.5); + + for &(_i8, _u8, _i16, _u16, _i32, _u32, _i64, _u64, _f32, _f64) in [tuple].iter() { + zzz(); // #break + } + + let more_complex: (i32, &Struct, Struct, Box) = + (80000, + &Struct { + x: 8000, + y: 80001.5, + z: false + }, + Struct { + x: -30000, + y: -300001.5, + z: true + }, + box 854237.5); + + for &(v1, + &Struct { x: x1, y: ref y1, z: z1 }, + Struct { x: ref x2, y: y2, z: ref z2 }, + box v2) in [more_complex].iter() { + zzz(); // #break + } +} + +fn zzz() {()} diff --git a/src/test/debuginfo/evec-in-struct.rs b/src/test/debuginfo/evec-in-struct.rs index aab9c446a9e2d..786868f6b89dd 100644 --- a/src/test/debuginfo/evec-in-struct.rs +++ b/src/test/debuginfo/evec-in-struct.rs @@ -53,28 +53,28 @@ #![allow(unused_variables)] struct NoPadding1 { - x: [u32, ..3], + x: [u32; 3], y: i32, - z: [f32, ..2] + z: [f32; 2] } struct NoPadding2 { - x: [u32, ..3], - y: [[u32, ..2], ..2] + x: [u32; 3], + y: [[u32; 2]; 2] } struct StructInternalPadding { - x: [i16, ..2], - y: [i64, ..2] + x: [i16; 2], + y: [i64; 2] } struct SingleVec { - x: [i16, ..5] + x: [i16; 5] } struct StructPaddedAtEnd { - x: [i64, ..2], - y: [i16, ..2] + x: [i64; 2], + y: [i16; 2] } fn main() { diff --git a/src/test/debuginfo/lexical-scope-in-for-loop.rs b/src/test/debuginfo/lexical-scope-in-for-loop.rs index bcaebb5c153f5..7636ffdb07dcd 100644 --- a/src/test/debuginfo/lexical-scope-in-for-loop.rs +++ b/src/test/debuginfo/lexical-scope-in-for-loop.rs @@ -9,7 +9,6 @@ // except according to those terms. 
// ignore-android: FIXME(#10381) -// ignore-test: Not sure what is going on here --pcwalton // min-lldb-version: 310 // compile-flags:-g diff --git a/src/test/debuginfo/lexical-scopes-in-block-expression.rs b/src/test/debuginfo/lexical-scopes-in-block-expression.rs index a1f34aea0f21f..41dee642feacd 100644 --- a/src/test/debuginfo/lexical-scopes-in-block-expression.rs +++ b/src/test/debuginfo/lexical-scopes-in-block-expression.rs @@ -450,7 +450,7 @@ fn main() { sentinel(); val - }, ..10]; + }; 10]; zzz(); // #break sentinel(); @@ -491,7 +491,7 @@ fn main() { sentinel(); // index expression - let a_vector = [10i, ..20]; + let a_vector = [10i; 20]; let _ = a_vector[{ zzz(); // #break sentinel(); diff --git a/src/test/debuginfo/recursive-struct.rs b/src/test/debuginfo/recursive-struct.rs index 032b8b1fa262f..8cc0fdabfc2e7 100644 --- a/src/test/debuginfo/recursive-struct.rs +++ b/src/test/debuginfo/recursive-struct.rs @@ -143,7 +143,7 @@ fn main() { value: 2, }; - let vec_unique: [UniqueNode, ..1] = [UniqueNode { + let vec_unique: [UniqueNode; 1] = [UniqueNode { next: Val { val: box UniqueNode { next: Empty, diff --git a/src/test/debuginfo/type-names.rs b/src/test/debuginfo/type-names.rs index d72b080409e1d..286c44667c5b4 100644 --- a/src/test/debuginfo/type-names.rs +++ b/src/test/debuginfo/type-names.rs @@ -99,10 +99,10 @@ // VECTORS // gdb-command:whatis fixed_size_vec1 -// gdb-check:type = struct ([type-names::Struct1, ..3], i16) +// gdb-check:type = struct ([type-names::Struct1; 3], i16) // gdb-command:whatis fixed_size_vec2 -// gdb-check:type = struct ([uint, ..3], i16) +// gdb-check:type = struct ([uint; 3], i16) // gdb-command:whatis slice1 // gdb-check:type = struct &[uint] diff --git a/src/test/debuginfo/vec.rs b/src/test/debuginfo/vec.rs index fd422a90e632f..00c93653cf411 100644 --- a/src/test/debuginfo/vec.rs +++ b/src/test/debuginfo/vec.rs @@ -30,7 +30,7 @@ #![allow(unused_variables)] -static mut VECT: [i32, ..3] = [1, 2, 3]; +static mut VECT: [i32; 3] = [1, 2, 3]; fn main() { let a = [1i, 2, 3]; diff --git a/src/test/pretty/asm-options.rs b/src/test/pretty/asm-options.rs new file mode 100644 index 0000000000000..bc9f89a3d15ff --- /dev/null +++ b/src/test/pretty/asm-options.rs @@ -0,0 +1,21 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(asm)] + +// pp-exact + +pub fn main() { + unsafe { + asm!("" : : : : "volatile"); + asm!("" : : : : "alignstack"); + asm!("" : : : : "intel"); + } +} diff --git a/src/test/pretty/blank-lines.rs b/src/test/pretty/blank-lines.rs index 24eb5337d25f7..1774edd3f76e2 100644 --- a/src/test/pretty/blank-lines.rs +++ b/src/test/pretty/blank-lines.rs @@ -9,7 +9,7 @@ // except according to those terms. 
// pp-exact -fn f() -> [int, ..3] { +fn f() -> [int; 3] { let picard = 0; let data = 1; diff --git a/src/test/pretty/issue-4264.pp b/src/test/pretty/issue-4264.pp index b5ea9bd4b89c6..e4389cd69dd62 100644 --- a/src/test/pretty/issue-4264.pp +++ b/src/test/pretty/issue-4264.pp @@ -21,26 +21,26 @@ // #4264 fixed-length vector types -pub fn foo(_: [int, ..(3 as uint)]) { } +pub fn foo(_: [int; (3 as uint)]) { } pub fn bar() { const FOO: uint = ((5u as uint) - (4u as uint) as uint); - let _: [(), ..(FOO as uint)] = ([(() as ())] as [(), ..1]); + let _: [(); (FOO as uint)] = ([(() as ())] as [(); 1]); - let _: [(), ..(1u as uint)] = ([(() as ())] as [(), ..1]); + let _: [(); (1u as uint)] = ([(() as ())] as [(); 1]); let _ = - (((&((([(1i as int), (2 as int), (3 as int)] as [int, ..3])) as - [int, ..3]) as &[int, ..3]) as *const _ as *const [int, ..3]) - as *const [int, ..(3u as uint)] as *const [int, ..3]); + (((&((([(1i as int), (2 as int), (3 as int)] as [int; 3])) as + [int; 3]) as &[int; 3]) as *const _ as *const [int; 3]) as + *const [int; (3u as uint)] as *const [int; 3]); (match (() as ()) { () => { #[inline] #[allow(dead_code)] static __STATIC_FMTSTR: &'static [&'static str] = - (&([("test" as &'static str)] as [&'static str, ..1]) as - &'static [&'static str, ..1]); + (&([("test" as &'static str)] as [&'static str; 1]) as + &'static [&'static str; 1]); @@ -50,36 +50,36 @@ ((::std::fmt::format as - fn(&core::fmt::Arguments<'_>) -> collections::string::String)((&((::std::fmt::Arguments::new - as - fn(&[&str], &[core::fmt::Argument<'_>]) -> core::fmt::Arguments<'_>)((__STATIC_FMTSTR - as - &'static [&'static str]), - (&([] - as - [core::fmt::Argument<'_>, ..0]) - as - &[core::fmt::Argument<'_>, ..0])) - as - core::fmt::Arguments<'_>) - as - &core::fmt::Arguments<'_>)) + fn(&core::fmt::Arguments<'_>) -> collections::string::String {std::fmt::format})((&((::std::fmt::Arguments::new + as + fn(&[&str], &[core::fmt::Argument<'_>]) -> core::fmt::Arguments<'_> {core::fmt::Arguments<'a>::new})((__STATIC_FMTSTR + as + &'static [&'static str]), + (&([] + as + [core::fmt::Argument<'_>; 0]) + as + &[core::fmt::Argument<'_>; 0])) + as + core::fmt::Arguments<'_>) + as + &core::fmt::Arguments<'_>)) as collections::string::String) } } as collections::string::String); } -pub type Foo = [int, ..(3u as uint)]; +pub type Foo = [int; (3u as uint)]; pub struct Bar { - pub x: [int, ..(3u as uint)], + pub x: [int; (3u as uint)], } -pub struct TupleBar([int, ..(4u as uint)]); -pub enum Baz { BazVariant([int, ..(5u as uint)]), } +pub struct TupleBar([int; (4u as uint)]); +pub enum Baz { BazVariant([int; (5u as uint)]), } pub fn id(x: T) -> T { (x as T) } pub fn use_id() { let _ = - ((id::<[int, ..(3u as uint)]> as - fn([int, ..3]) -> [int, ..3])(([(1 as int), (2 as int), - (3 as int)] as [int, ..3])) as - [int, ..3]); + ((id::<[int; (3u as uint)]> as + fn([int; 3]) -> [int; 3] {id})(([(1 as int), (2 as int), + (3 as int)] as [int; 3])) as + [int; 3]); } fn main() { } diff --git a/src/test/pretty/where-clauses.rs b/src/test/pretty/where-clauses.rs new file mode 100644 index 0000000000000..0f3b914334e12 --- /dev/null +++ b/src/test/pretty/where-clauses.rs @@ -0,0 +1,16 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +// pp-exact + +fn f<'a, 'b, T>(t: T) -> int where T: 'a, 'a:'b, T: Eq { 0 } + +fn main() { } + diff --git a/src/test/run-make/dep-info-custom/Makefile.foo b/src/test/run-make/dep-info-custom/Makefile.foo index 302bb84908b66..88be7630e8391 100644 --- a/src/test/run-make/dep-info-custom/Makefile.foo +++ b/src/test/run-make/dep-info-custom/Makefile.foo @@ -1,4 +1,4 @@ -LIB := $(shell $(RUSTC) --crate-file-name --crate-type=lib lib.rs) +LIB := $(shell $(RUSTC) --print file-names --crate-type=lib lib.rs) $(TMPDIR)/$(LIB): $(RUSTC) --dep-info $(TMPDIR)/custom-deps-file.d --crate-type=lib lib.rs diff --git a/src/test/run-make/dep-info/Makefile.foo b/src/test/run-make/dep-info/Makefile.foo index 2b43dd0ec7095..e5df31f88c1e1 100644 --- a/src/test/run-make/dep-info/Makefile.foo +++ b/src/test/run-make/dep-info/Makefile.foo @@ -1,7 +1,7 @@ -LIB := $(shell $(RUSTC) --crate-file-name --crate-type=lib lib.rs) +LIB := $(shell $(RUSTC) --print file-names --crate-type=lib lib.rs) $(TMPDIR)/$(LIB): - $(RUSTC) --dep-info --crate-type=lib lib.rs + $(RUSTC) --emit dep-info,link --crate-type=lib lib.rs touch $(TMPDIR)/done -include $(TMPDIR)/foo.d diff --git a/src/test/run-make/graphviz-flowgraph/Makefile b/src/test/run-make/graphviz-flowgraph/Makefile index 09440949177dd..0562e000e56e9 100644 --- a/src/test/run-make/graphviz-flowgraph/Makefile +++ b/src/test/run-make/graphviz-flowgraph/Makefile @@ -28,7 +28,7 @@ $(TMPDIR)/%.pp: %.rs $(TMPDIR)/%.dot: %.rs $(eval $(call FIND_LAST_BLOCK,$<)) - $(RUSTC_LIB) --pretty flowgraph=$(LASTBLOCKNUM_$<) $< -o $@.tmp + $(RUSTC_LIB) -Z unstable-options --xpretty flowgraph=$(LASTBLOCKNUM_$<) $< -o $@.tmp cat $@.tmp | sed -e 's@ (id=[0-9]*)@@g' \ -e 's@\[label=""\]@@' \ -e 's@digraph [a-zA-Z0-9_]* @digraph block @' \ diff --git a/src/test/run-make/issue-7349/Makefile b/src/test/run-make/issue-7349/Makefile index 7f715a475bead..f24933cac0117 100644 --- a/src/test/run-make/issue-7349/Makefile +++ b/src/test/run-make/issue-7349/Makefile @@ -6,6 +6,6 @@ # used in the inner functions should each appear only once in the generated IR. 
all: - $(RUSTC) foo.rs --emit=ir + $(RUSTC) foo.rs --emit=llvm-ir [ "$$(grep -c 8675309 "$(TMPDIR)/foo.ll")" -eq "1" ] [ "$$(grep -c 11235813 "$(TMPDIR)/foo.ll")" -eq "1" ] diff --git a/src/test/run-make/libs-through-symlinks/Makefile b/src/test/run-make/libs-through-symlinks/Makefile index d19e8f22c0547..f097d8fabd1a8 100644 --- a/src/test/run-make/libs-through-symlinks/Makefile +++ b/src/test/run-make/libs-through-symlinks/Makefile @@ -4,7 +4,7 @@ ifdef IS_WINDOWS all: else -NAME := $(shell $(RUSTC) --crate-file-name foo.rs) +NAME := $(shell $(RUSTC) --print file-names foo.rs) all: mkdir -p $(TMPDIR)/outdir diff --git a/src/test/run-make/no-stack-check/attr.rs b/src/test/run-make/no-stack-check/attr.rs index ef2db932b418d..7d0fc2d7fe503 100644 --- a/src/test/run-make/no-stack-check/attr.rs +++ b/src/test/run-make/no-stack-check/attr.rs @@ -20,6 +20,6 @@ extern { #[no_stack_check] pub unsafe fn foo() { // Make sure we use the stack - let x: [u8, ..50] = [0, ..50]; + let x: [u8; 50] = [0; 50]; black_box(x.as_ptr()); } diff --git a/src/test/run-make/no-stack-check/flag.rs b/src/test/run-make/no-stack-check/flag.rs index ee0364001e19a..2b6e7240d6fa8 100644 --- a/src/test/run-make/no-stack-check/flag.rs +++ b/src/test/run-make/no-stack-check/flag.rs @@ -19,6 +19,6 @@ extern { pub unsafe fn foo() { // Make sure we use the stack - let x: [u8, ..50] = [0, ..50]; + let x: [u8; 50] = [0; 50]; black_box(x.as_ptr()); } diff --git a/src/test/run-make/output-type-permutations/Makefile b/src/test/run-make/output-type-permutations/Makefile index fed071d1a43c2..4efbd9ee48df2 100644 --- a/src/test/run-make/output-type-permutations/Makefile +++ b/src/test/run-make/output-type-permutations/Makefile @@ -12,7 +12,7 @@ all: rm $(TMPDIR)/$(call BIN,bar) [ "$$(ls -1 $(TMPDIR) | wc -l)" -eq "0" ] - $(RUSTC) foo.rs --emit=asm,ir,bc,obj,link + $(RUSTC) foo.rs --emit=asm,llvm-ir,llvm-bc,obj,link rm $(TMPDIR)/bar.ll rm $(TMPDIR)/bar.bc rm $(TMPDIR)/bar.s @@ -24,11 +24,11 @@ all: rm $(TMPDIR)/foo [ "$$(ls -1 $(TMPDIR) | wc -l)" -eq "0" ] - $(RUSTC) foo.rs --emit=bc -o $(TMPDIR)/foo + $(RUSTC) foo.rs --emit=llvm-bc -o $(TMPDIR)/foo rm $(TMPDIR)/foo [ "$$(ls -1 $(TMPDIR) | wc -l)" -eq "0" ] - $(RUSTC) foo.rs --emit=ir -o $(TMPDIR)/foo + $(RUSTC) foo.rs --emit=llvm-ir -o $(TMPDIR)/foo rm $(TMPDIR)/foo [ "$$(ls -1 $(TMPDIR) | wc -l)" -eq "0" ] @@ -56,7 +56,7 @@ all: rm $(TMPDIR)/$(call BIN,foo) [ "$$(ls -1 $(TMPDIR) | wc -l)" -eq "0" ] - $(RUSTC) foo.rs --emit=asm,ir,bc,obj,link --crate-type=staticlib + $(RUSTC) foo.rs --emit=asm,llvm-ir,llvm-bc,obj,link --crate-type=staticlib rm $(TMPDIR)/bar.ll rm $(TMPDIR)/bar.s rm $(TMPDIR)/bar.o @@ -65,7 +65,7 @@ all: # Don't check that the $(TMPDIR) is empty - we left `foo.bc` for later # comparison. 
- $(RUSTC) foo.rs --emit=bc,link --crate-type=rlib + $(RUSTC) foo.rs --emit=llvm-bc,link --crate-type=rlib cmp $(TMPDIR)/foo.bc $(TMPDIR)/bar.bc rm $(TMPDIR)/bar.bc rm $(TMPDIR)/foo.bc diff --git a/src/test/run-make/sepcomp-cci-copies/Makefile b/src/test/run-make/sepcomp-cci-copies/Makefile index 65db841b0c0ed..189088219d5b3 100644 --- a/src/test/run-make/sepcomp-cci-copies/Makefile +++ b/src/test/run-make/sepcomp-cci-copies/Makefile @@ -5,5 +5,5 @@ all: $(RUSTC) cci_lib.rs - $(RUSTC) foo.rs --emit=ir -C codegen-units=3 + $(RUSTC) foo.rs --emit=llvm-ir -C codegen-units=3 [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ .*cci_fn)" -eq "2" ] diff --git a/src/test/run-make/sepcomp-inlining/Makefile b/src/test/run-make/sepcomp-inlining/Makefile index 6cb9f9a3f31bc..bc299de0c2d3f 100644 --- a/src/test/run-make/sepcomp-inlining/Makefile +++ b/src/test/run-make/sepcomp-inlining/Makefile @@ -6,7 +6,7 @@ # function should be defined in only one compilation unit. all: - $(RUSTC) foo.rs --emit=ir -C codegen-units=3 + $(RUSTC) foo.rs --emit=llvm-ir -C codegen-units=3 [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*inlined)" -eq "1" ] [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ available_externally\ i32\ .*inlined)" -eq "2" ] [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*normal)" -eq "1" ] diff --git a/src/test/run-make/sepcomp-separate/Makefile b/src/test/run-make/sepcomp-separate/Makefile index 265bd68bd2e82..a475bdfd74a24 100644 --- a/src/test/run-make/sepcomp-separate/Makefile +++ b/src/test/run-make/sepcomp-separate/Makefile @@ -5,5 +5,5 @@ # wind up in three different compilation units. all: - $(RUSTC) foo.rs --emit=ir -C codegen-units=3 + $(RUSTC) foo.rs --emit=llvm-ir -C codegen-units=3 [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ .*magic_fn)" -eq "3" ] diff --git a/src/test/run-make/target-specs/foo.rs b/src/test/run-make/target-specs/foo.rs index cab98204b17d7..fd112034f40c5 100644 --- a/src/test/run-make/target-specs/foo.rs +++ b/src/test/run-make/target-specs/foo.rs @@ -21,7 +21,7 @@ trait Sized { } fn start(_main: *const u8, _argc: int, _argv: *const *const u8) -> int { 0 } extern { - fn _foo() -> [u8, ..16]; + fn _foo() -> [u8; 16]; } fn _main() { diff --git a/src/test/run-make/version/Makefile b/src/test/run-make/version/Makefile index 4950fe7572a3d..23e14a9cb93de 100644 --- a/src/test/run-make/version/Makefile +++ b/src/test/run-make/version/Makefile @@ -1,8 +1,6 @@ -include ../tools.mk all: - $(RUSTC) -v - $(RUSTC) -v verbose - $(RUSTC) -v bad_arg && exit 1 || exit 0 - $(RUSTC) --version verbose - $(RUSTC) --version bad_arg && exit 1 || exit 0 + $(RUSTC) -V + $(RUSTC) -vV + $(RUSTC) --version --verbose diff --git a/src/test/run-make/volatile-intrinsics/Makefile b/src/test/run-make/volatile-intrinsics/Makefile index bf79ca68c9461..34fa56efee6fa 100644 --- a/src/test/run-make/volatile-intrinsics/Makefile +++ b/src/test/run-make/volatile-intrinsics/Makefile @@ -5,6 +5,6 @@ all: $(RUSTC) main.rs $(call RUN,main) # ... and the loads/stores must not be optimized out. 
- $(RUSTC) main.rs --emit=ir + $(RUSTC) main.rs --emit=llvm-ir grep "load volatile" $(TMPDIR)/main.ll grep "store volatile" $(TMPDIR)/main.ll diff --git a/src/test/run-pass/bitv-perf-test.rs b/src/test/run-pass/bitv-perf-test.rs index 281167ff46c86..c5f69f249db6b 100644 --- a/src/test/run-pass/bitv-perf-test.rs +++ b/src/test/run-pass/bitv-perf-test.rs @@ -13,8 +13,8 @@ extern crate collections; use std::collections::Bitv; fn bitv_test() { - let mut v1 = box Bitv::with_capacity(31, false); - let v2 = box Bitv::with_capacity(31, true); + let mut v1 = box Bitv::from_elem(31, false); + let v2 = box Bitv::from_elem(31, true); v1.union(&*v2); } diff --git a/src/test/run-pass/cast-in-array-size.rs b/src/test/run-pass/cast-in-array-size.rs index aaffb013ad8c7..717ca3ff9fecc 100644 --- a/src/test/run-pass/cast-in-array-size.rs +++ b/src/test/run-pass/cast-in-array-size.rs @@ -13,8 +13,8 @@ const SIZE: int = 25; fn main() { - let _a: [bool, ..1 as uint]; - let _b: [int, ..SIZE as uint] = [1, ..SIZE as uint]; - let _c: [bool, ..'\n' as uint] = [true, ..'\n' as uint]; - let _d: [bool, ..true as uint] = [true, ..true as uint]; + let _a: [bool; 1 as uint]; + let _b: [int; SIZE as uint] = [1; SIZE as uint]; + let _c: [bool; '\n' as uint] = [true; '\n' as uint]; + let _d: [bool; true as uint] = [true; true as uint]; } diff --git a/src/test/run-pass/check-static-slice.rs b/src/test/run-pass/check-static-slice.rs index 60daedec4c79f..6e2cfedf9ec3e 100644 --- a/src/test/run-pass/check-static-slice.rs +++ b/src/test/run-pass/check-static-slice.rs @@ -11,11 +11,11 @@ // Check that the various ways of getting to a reference to a vec (both sized // and unsized) work properly. -const aa: [int, ..3] = [1, 2, 3]; -const ab: &'static [int, ..3] = &aa; +const aa: [int; 3] = [1, 2, 3]; +const ab: &'static [int; 3] = &aa; const ac: &'static [int] = ab; const ad: &'static [int] = &aa; -const ae: &'static [int, ..3] = &[1, 2, 3]; +const ae: &'static [int; 3] = &[1, 2, 3]; const af: &'static [int] = &[1, 2, 3]; static ca: int = aa[0]; diff --git a/src/test/run-pass/const-autoderef.rs b/src/test/run-pass/const-autoderef.rs index e80ed7c984b4a..71312fb387845 100644 --- a/src/test/run-pass/const-autoderef.rs +++ b/src/test/run-pass/const-autoderef.rs @@ -8,9 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -static A: [u8, ..1] = ['h' as u8]; +static A: [u8; 1] = ['h' as u8]; static B: u8 = (&A)[0]; -static C: &'static &'static &'static &'static [u8, ..1] = & & & &A; +static C: &'static &'static &'static &'static [u8; 1] = & & & &A; static D: u8 = (&C)[0]; pub fn main() { diff --git a/src/test/run-pass/const-enum-vec-index.rs b/src/test/run-pass/const-enum-vec-index.rs index fef6c8624cf71..4c8124d28a241 100644 --- a/src/test/run-pass/const-enum-vec-index.rs +++ b/src/test/run-pass/const-enum-vec-index.rs @@ -12,7 +12,7 @@ enum E { V1(int), V0 } const C: &'static [E] = &[E::V0, E::V1(0xDEADBEE)]; static C0: E = C[0]; static C1: E = C[1]; -const D: &'static [E, ..2] = &[E::V0, E::V1(0xDEADBEE)]; +const D: &'static [E; 2] = &[E::V0, E::V1(0xDEADBEE)]; static D0: E = C[0]; static D1: E = C[1]; diff --git a/src/test/run-pass/const-enum-vector.rs b/src/test/run-pass/const-enum-vector.rs index 83687f8775b34..6eb5c2dab38ba 100644 --- a/src/test/run-pass/const-enum-vector.rs +++ b/src/test/run-pass/const-enum-vector.rs @@ -9,7 +9,7 @@ // except according to those terms. 
enum E { V1(int), V0 } -static C: [E, ..3] = [E::V0, E::V1(0xDEADBEE), E::V0]; +static C: [E; 3] = [E::V0, E::V1(0xDEADBEE), E::V0]; pub fn main() { match C[1] { diff --git a/src/test/run-pass/const-expr-in-fixed-length-vec.rs b/src/test/run-pass/const-expr-in-fixed-length-vec.rs index 317a54e927f31..6317c2eec1801 100644 --- a/src/test/run-pass/const-expr-in-fixed-length-vec.rs +++ b/src/test/run-pass/const-expr-in-fixed-length-vec.rs @@ -14,6 +14,6 @@ pub fn main() { const FOO: uint = 2; - let _v: [int, ..FOO*3]; + let _v: [int; FOO*3]; } diff --git a/src/test/run-pass/const-expr-in-vec-repeat.rs b/src/test/run-pass/const-expr-in-vec-repeat.rs index 54386b33dd9da..d692f3a87e457 100644 --- a/src/test/run-pass/const-expr-in-vec-repeat.rs +++ b/src/test/run-pass/const-expr-in-vec-repeat.rs @@ -13,6 +13,6 @@ pub fn main() { const FOO: uint = 2; - let _v = [0i, ..FOO*3*2/2]; + let _v = [0i; FOO*3*2/2]; } diff --git a/src/test/run-pass/const-extern-function.rs b/src/test/run-pass/const-extern-function.rs index be7c47dafc017..069ca6ecf49dd 100644 --- a/src/test/run-pass/const-extern-function.rs +++ b/src/test/run-pass/const-extern-function.rs @@ -18,6 +18,6 @@ struct S { } pub fn main() { - assert!(foopy == f); + assert!(foopy as extern "C" fn() == f); assert!(f == s.f); } diff --git a/src/test/run-pass/const-fields-and-indexing.rs b/src/test/run-pass/const-fields-and-indexing.rs index 49b244a162b0c..0819e0becbf95 100644 --- a/src/test/run-pass/const-fields-and-indexing.rs +++ b/src/test/run-pass/const-fields-and-indexing.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -const x : [int, ..4] = [1,2,3,4]; +const x : [int; 4] = [1,2,3,4]; static p : int = x[2]; const y : &'static [int] = &[1,2,3,4]; static q : int = y[2]; diff --git a/src/test/run-pass/const-region-ptrs-noncopy.rs b/src/test/run-pass/const-region-ptrs-noncopy.rs index 5e417efb4b583..e8081005d4a8f 100644 --- a/src/test/run-pass/const-region-ptrs-noncopy.rs +++ b/src/test/run-pass/const-region-ptrs-noncopy.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -type Big = [u64, ..8]; +type Big = [u64; 8]; struct Pair<'a> { a: int, b: &'a Big } const x: &'static Big = &([13, 14, 10, 13, 11, 14, 14, 15]); const y: &'static Pair<'static> = &Pair {a: 15, b: x}; diff --git a/src/test/run-pass/const-str-ptr.rs b/src/test/run-pass/const-str-ptr.rs index 47d59eca26316..d6f0296619ab4 100644 --- a/src/test/run-pass/const-str-ptr.rs +++ b/src/test/run-pass/const-str-ptr.rs @@ -10,8 +10,8 @@ use std::{str, string}; -const A: [u8, ..2] = ['h' as u8, 'i' as u8]; -const B: &'static [u8, ..2] = &A; +const A: [u8; 2] = ['h' as u8, 'i' as u8]; +const B: &'static [u8; 2] = &A; const C: *const u8 = B as *const u8; pub fn main() { diff --git a/src/test/run-pass/const-vecs-and-slices.rs b/src/test/run-pass/const-vecs-and-slices.rs index 1a2a3e36e8746..26874b9f9d52d 100644 --- a/src/test/run-pass/const-vecs-and-slices.rs +++ b/src/test/run-pass/const-vecs-and-slices.rs @@ -8,9 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-static x : [int, ..4] = [1,2,3,4]; +static x : [int; 4] = [1,2,3,4]; static y : &'static [int] = &[1,2,3,4]; -static z : &'static [int, ..4] = &[1,2,3,4]; +static z : &'static [int; 4] = &[1,2,3,4]; static zz : &'static [int] = &[1,2,3,4]; pub fn main() { diff --git a/src/test/run-pass/dst-struct.rs b/src/test/run-pass/dst-struct.rs index bf5b300f7cf0a..3644ca81d5659 100644 --- a/src/test/run-pass/dst-struct.rs +++ b/src/test/run-pass/dst-struct.rs @@ -120,7 +120,7 @@ pub fn main() { assert!((*f2)[1] == 2); // Nested Box. - let f1 : Box> = box Fat { f1: 5, f2: "some str", ptr: [1, 2, 3] }; + let f1 : Box> = box Fat { f1: 5, f2: "some str", ptr: [1, 2, 3] }; foo(&*f1); let f2 : Box> = f1; foo(&*f2); diff --git a/src/test/run-pass/enum-vec-initializer.rs b/src/test/run-pass/enum-vec-initializer.rs index 0256420ac4c3d..d436916c27967 100644 --- a/src/test/run-pass/enum-vec-initializer.rs +++ b/src/test/run-pass/enum-vec-initializer.rs @@ -16,9 +16,9 @@ const BAR:uint = Flopsy::Bunny as uint; const BAR2:uint = BAR; pub fn main() { - let _v = [0i, .. Flopsy::Bunny as uint]; - let _v = [0i, .. BAR]; - let _v = [0i, .. BAR2]; + let _v = [0i; Flopsy::Bunny as uint]; + let _v = [0i; BAR]; + let _v = [0i; BAR2]; const BAR3:uint = BAR2; - let _v = [0i, .. BAR3]; + let _v = [0i; BAR3]; } diff --git a/src/test/run-pass/evec-internal.rs b/src/test/run-pass/evec-internal.rs index 36b5f86aedab4..28b5f781b5cfd 100644 --- a/src/test/run-pass/evec-internal.rs +++ b/src/test/run-pass/evec-internal.rs @@ -13,16 +13,16 @@ // Doesn't work; needs a design decision. pub fn main() { - let x : [int, ..5] = [1,2,3,4,5]; - let _y : [int, ..5] = [1,2,3,4,5]; + let x : [int; 5] = [1,2,3,4,5]; + let _y : [int; 5] = [1,2,3,4,5]; let mut z = [1,2,3,4,5]; z = x; assert_eq!(z[0], 1); assert_eq!(z[4], 5); - let a : [int, ..5] = [1,1,1,1,1]; - let b : [int, ..5] = [2,2,2,2,2]; - let c : [int, ..5] = [2,2,2,2,3]; + let a : [int; 5] = [1,1,1,1,1]; + let b : [int; 5] = [2,2,2,2,2]; + let c : [int; 5] = [2,2,2,2,3]; log(debug, a); diff --git a/src/test/run-pass/exponential-notation.rs b/src/test/run-pass/exponential-notation.rs index f63ab7fb7c9a5..38d1093762432 100644 --- a/src/test/run-pass/exponential-notation.rs +++ b/src/test/run-pass/exponential-notation.rs @@ -10,25 +10,27 @@ #![feature(macro_rules)] -use std::num::strconv as s; +use std::num::strconv::ExponentFormat::{ExpBin, ExpDec}; +use std::num::strconv::SignificantDigits::DigMax; +use std::num::strconv::SignFormat::{SignAll, SignNeg}; use std::num::strconv::float_to_str_common as to_string; macro_rules! 
t(($a:expr, $b:expr) => { { let (r, _) = $a; assert_eq!(r, $b.to_string()); } }); pub fn main() { // Basic usage - t!(to_string(1.2345678e-5f64, 10u, true, s::SignNeg, s::DigMax(6), s::ExpDec, false), + t!(to_string(1.2345678e-5f64, 10u, true, SignNeg, DigMax(6), ExpDec, false), "1.234568e-5"); // Hexadecimal output - t!(to_string(7.281738281250e+01f64, 16u, true, s::SignAll, s::DigMax(6), s::ExpBin, false), + t!(to_string(7.281738281250e+01f64, 16u, true, SignAll, DigMax(6), ExpBin, false), "+1.2345p+6"); - t!(to_string(-1.777768135071e-02f64, 16u, true, s::SignAll, s::DigMax(6), s::ExpBin, false), + t!(to_string(-1.777768135071e-02f64, 16u, true, SignAll, DigMax(6), ExpBin, false), "-1.2345p-6"); // Some denormals - t!(to_string(4.9406564584124654e-324f64, 10u, true, s::SignNeg, s::DigMax(6), s::ExpBin, false), + t!(to_string(4.9406564584124654e-324f64, 10u, true, SignNeg, DigMax(6), ExpBin, false), "1p-1074"); - t!(to_string(2.2250738585072009e-308f64, 10u, true, s::SignNeg, s::DigMax(6), s::ExpBin, false), + t!(to_string(2.2250738585072009e-308f64, 10u, true, SignNeg, DigMax(6), ExpBin, false), "1p-1022"); } diff --git a/src/test/run-pass/extern-compare-with-return-type.rs b/src/test/run-pass/extern-compare-with-return-type.rs index 057394b2624fe..3febff18704de 100644 --- a/src/test/run-pass/extern-compare-with-return-type.rs +++ b/src/test/run-pass/extern-compare-with-return-type.rs @@ -18,15 +18,17 @@ extern fn uintret() -> uint { 22 } extern fn uintvoidret(_x: uint) {} extern fn uintuintuintuintret(x: uint, y: uint, z: uint) -> uint { x+y+z } +type uintuintuintuintret = extern fn(uint,uint,uint) -> uint; pub fn main() { - assert!(voidret1 == voidret1); - assert!(voidret1 != voidret2); + assert!(voidret1 as extern fn() == voidret1 as extern fn()); + assert!(voidret1 as extern fn() != voidret2 as extern fn()); - assert!(uintret == uintret); + assert!(uintret as extern fn() -> uint == uintret as extern fn() -> uint); - assert!(uintvoidret == uintvoidret); + assert!(uintvoidret as extern fn(uint) == uintvoidret as extern fn(uint)); - assert!(uintuintuintuintret == uintuintuintuintret); + assert!(uintuintuintuintret as uintuintuintuintret == + uintuintuintuintret as uintuintuintuintret); } diff --git a/src/test/run-pass/fn-item-type-cast.rs b/src/test/run-pass/fn-item-type-cast.rs new file mode 100644 index 0000000000000..bfd02f5e27b0c --- /dev/null +++ b/src/test/run-pass/fn-item-type-cast.rs @@ -0,0 +1,28 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test explicit coercions from a fn item type to a fn pointer type. + +fn foo(x: int) -> int { x * 2 } +fn bar(x: int) -> int { x * 4 } +type IntMap = fn(int) -> int; + +fn eq(x: T, y: T) { } + +static TEST: Option = Some(foo as IntMap); + +fn main() { + let f = foo as IntMap; + + let f = if true { foo as IntMap } else { bar as IntMap }; + assert_eq!(f(4), 8); + + eq(foo as IntMap, bar as IntMap); +} diff --git a/src/test/run-pass/fn-item-type-coerce.rs b/src/test/run-pass/fn-item-type-coerce.rs new file mode 100644 index 0000000000000..8427a0f444621 --- /dev/null +++ b/src/test/run-pass/fn-item-type-coerce.rs @@ -0,0 +1,23 @@ +// Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test implicit coercions from a fn item type to a fn pointer type. + +fn foo(x: int) -> int { x * 2 } +fn bar(x: int) -> int { x * 4 } +type IntMap = fn(int) -> int; + +fn eq(x: T, y: T) { } + +fn main() { + let f: IntMap = foo; + + eq::(foo, bar); +} diff --git a/src/test/run-pass/huge-largest-array.rs b/src/test/run-pass/huge-largest-array.rs index d494e0bf40dea..e24731546edfe 100644 --- a/src/test/run-pass/huge-largest-array.rs +++ b/src/test/run-pass/huge-largest-array.rs @@ -12,10 +12,10 @@ use std::mem::size_of; #[cfg(target_word_size = "32")] pub fn main() { - assert_eq!(size_of::<[u8, ..(1 << 31) - 1]>(), (1 << 31) - 1); + assert_eq!(size_of::<[u8; (1 << 31) - 1]>(), (1 << 31) - 1); } #[cfg(target_word_size = "64")] pub fn main() { - assert_eq!(size_of::<[u8, ..(1 << 47) - 1]>(), (1 << 47) - 1); + assert_eq!(size_of::<[u8; (1 << 47) - 1]>(), (1 << 47) - 1); } diff --git a/src/test/run-pass/ifmt.rs b/src/test/run-pass/ifmt.rs index 9eac9c30dc8f7..f3e15562b6dc9 100644 --- a/src/test/run-pass/ifmt.rs +++ b/src/test/run-pass/ifmt.rs @@ -60,6 +60,7 @@ pub fn main() { t!(format!("{}", 10i), "10"); t!(format!("{}", 10i), "10"); t!(format!("{}", 10u), "10"); + t!(format!("{:?}", true), "true"); t!(format!("{:o}", 10u), "12"); t!(format!("{:x}", 10u), "a"); t!(format!("{:X}", 10u), "A"); diff --git a/src/test/run-pass/issue-10767.rs b/src/test/run-pass/issue-10767.rs index a30eb8120eae1..d28950241874d 100644 --- a/src/test/run-pass/issue-10767.rs +++ b/src/test/run-pass/issue-10767.rs @@ -12,5 +12,5 @@ pub fn main() { fn f() { }; - let _: Box = box f; + let _: Box = box() (f as fn()); } diff --git a/src/test/run-pass/issue-11205.rs b/src/test/run-pass/issue-11205.rs index ea138311f19b3..549a70f19e334 100644 --- a/src/test/run-pass/issue-11205.rs +++ b/src/test/run-pass/issue-11205.rs @@ -12,22 +12,22 @@ trait Foo {} impl Foo for int {} -fn foo(_: [&Foo, ..2]) {} +fn foo(_: [&Foo; 2]) {} fn foos(_: &[&Foo]) {} fn foog(_: &[T], _: &[T]) {} -fn bar(_: [Box, ..2]) {} +fn bar(_: [Box; 2]) {} fn bars(_: &[Box]) {} fn main() { - let x: [&Foo, ..2] = [&1i, &2i]; + let x: [&Foo; 2] = [&1i, &2i]; foo(x); foo([&1i, &2i]); let r = &1i; - let x: [&Foo, ..2] = [r, ..2]; + let x: [&Foo; 2] = [r; 2]; foo(x); - foo([&1i, ..2]); + foo([&1i; 2]); let x: &[&Foo] = &[&1i, &2i]; foos(x); @@ -37,7 +37,7 @@ fn main() { let r = &1i; foog(x, &[r]); - let x: [Box, ..2] = [box 1i, box 2i]; + let x: [Box; 2] = [box 1i, box 2i]; bar(x); bar([box 1i, box 2i]); @@ -49,16 +49,16 @@ fn main() { foog(x, &[box 1i]); struct T<'a> { - t: [&'a (Foo+'a), ..2] + t: [&'a (Foo+'a); 2] } let _n = T { t: [&1i, &2i] }; let r = &1i; let _n = T { - t: [r, ..2] + t: [r; 2] }; - let x: [&Foo, ..2] = [&1i, &2i]; + let x: [&Foo; 2] = [&1i, &2i]; let _n = T { t: x }; @@ -70,11 +70,11 @@ fn main() { t: &[&1i, &2i] }; let r = &1i; - let r: [&Foo, ..2] = [r, ..2]; + let r: [&Foo; 2] = [r; 2]; let _n = F { t: &r }; - let x: [&Foo, ..2] = [&1i, &2i]; + let x: [&Foo; 2] = [&1i, &2i]; let _n = F { t: &x }; @@ -85,7 +85,7 @@ fn main() { let _n = M { t: &[box 1i, box 2i] }; - let x: [Box, ..2] = [box 1i, box 2i]; + let x: [Box; 2] = [box 1i, box 2i]; let _n = M { t: &x }; diff --git a/src/test/run-pass/issue-11736.rs 
b/src/test/run-pass/issue-11736.rs index 912a62b5b0f6a..bc4ceb38de33c 100644 --- a/src/test/run-pass/issue-11736.rs +++ b/src/test/run-pass/issue-11736.rs @@ -16,7 +16,7 @@ use std::num::Float; fn main() { // Generate sieve of Eratosthenes for n up to 1e6 let n = 1000000u; - let mut sieve = Bitv::with_capacity(n+1, true); + let mut sieve = Bitv::from_elem(n+1, true); let limit: uint = (n as f32).sqrt() as uint; for i in range(2, limit+1) { if sieve[i] { diff --git a/src/test/run-pass/issue-13167.rs b/src/test/run-pass/issue-13167.rs index be3ee0e078311..1282077028ffb 100644 --- a/src/test/run-pass/issue-13167.rs +++ b/src/test/run-pass/issue-13167.rs @@ -11,7 +11,7 @@ use std::slice; pub struct PhfMapEntries<'a, T: 'a> { - iter: slice::Items<'a, (&'static str, T)>, + iter: slice::Iter<'a, (&'static str, T)>, } impl<'a, T> Iterator<(&'static str, &'a T)> for PhfMapEntries<'a, T> { diff --git a/src/test/run-pass/issue-13259-windows-tcb-trash.rs b/src/test/run-pass/issue-13259-windows-tcb-trash.rs index 0e42bdbd6add7..329ab7c921dda 100644 --- a/src/test/run-pass/issue-13259-windows-tcb-trash.rs +++ b/src/test/run-pass/issue-13259-windows-tcb-trash.rs @@ -27,7 +27,7 @@ mod imp { } pub fn test() { - let mut buf: [u16, ..50] = [0, ..50]; + let mut buf: [u16; 50] = [0; 50]; let ret = unsafe { FormatMessageW(0x1000, 0 as *mut c_void, 1, 0x400, buf.as_mut_ptr(), buf.len() as u32, 0 as *const c_void) diff --git a/src/test/run-pass/issue-13763.rs b/src/test/run-pass/issue-13763.rs index 8b2b732415ee0..81b6892b0f97a 100644 --- a/src/test/run-pass/issue-13763.rs +++ b/src/test/run-pass/issue-13763.rs @@ -12,9 +12,9 @@ use std::u8; const NUM: uint = u8::BITS as uint; -struct MyStruct { nums: [uint, ..8] } +struct MyStruct { nums: [uint; 8] } fn main() { - let _s = MyStruct { nums: [0, ..NUM] }; + let _s = MyStruct { nums: [0; NUM] }; } diff --git a/src/test/run-pass/issue-13837.rs b/src/test/run-pass/issue-13837.rs index 221115a0869a5..f62a45277b22a 100644 --- a/src/test/run-pass/issue-13837.rs +++ b/src/test/run-pass/issue-13837.rs @@ -8,6 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -static TEST_VALUE : *const [int, ..2] = 0x1234 as *const [int, ..2]; +static TEST_VALUE : *const [int; 2] = 0x1234 as *const [int; 2]; fn main() {} diff --git a/src/test/run-pass/issue-14254.rs b/src/test/run-pass/issue-14254.rs index 160828d42fc54..ad4ed03e6e2c8 100644 --- a/src/test/run-pass/issue-14254.rs +++ b/src/test/run-pass/issue-14254.rs @@ -24,7 +24,7 @@ impl BarTy { fn b(&self) {} } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. impl Foo for *const BarTy { fn bar(&self) { self.baz(); @@ -33,7 +33,7 @@ impl Foo for *const BarTy { } } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. impl<'a> Foo for &'a BarTy { fn bar(&self) { self.baz(); @@ -45,7 +45,7 @@ impl<'a> Foo for &'a BarTy { } } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. impl<'a> Foo for &'a mut BarTy { fn bar(&self) { self.baz(); @@ -57,7 +57,7 @@ impl<'a> Foo for &'a mut BarTy { } } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. 
impl Foo for Box { fn bar(&self) { self.baz(); @@ -65,7 +65,7 @@ impl Foo for Box { } } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. impl Foo for *const int { fn bar(&self) { self.baz(); @@ -73,7 +73,7 @@ impl Foo for *const int { } } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. impl<'a> Foo for &'a int { fn bar(&self) { self.baz(); @@ -81,7 +81,7 @@ impl<'a> Foo for &'a int { } } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. impl<'a> Foo for &'a mut int { fn bar(&self) { self.baz(); @@ -89,7 +89,7 @@ impl<'a> Foo for &'a mut int { } } -// If these fail, it's necessary to update middle::resolve and the cfail tests. +// If these fail, it's necessary to update rustc_resolve and the cfail tests. impl Foo for Box { fn bar(&self) { self.baz(); diff --git a/src/test/run-pass/issue-14940.rs b/src/test/run-pass/issue-14940.rs index cef09af1fcff2..d815620c9694d 100644 --- a/src/test/run-pass/issue-14940.rs +++ b/src/test/run-pass/issue-14940.rs @@ -15,7 +15,7 @@ fn main() { let args = os::args(); if args.len() > 1 { let mut out = stdio::stdout(); - out.write(&['a' as u8, ..128 * 1024]).unwrap(); + out.write(&['a' as u8; 128 * 1024]).unwrap(); } else { let out = Command::new(args[0].as_slice()).arg("child").output(); let out = out.unwrap(); diff --git a/src/test/run-pass/issue-15444.rs b/src/test/run-pass/issue-15444.rs index f5618c2c7a3c4..0f4978d78dd86 100644 --- a/src/test/run-pass/issue-15444.rs +++ b/src/test/run-pass/issue-15444.rs @@ -25,5 +25,6 @@ fn thing(a: int, b: int) -> int { } fn main() { + let thing: fn(int, int) -> int = thing; // coerce to fn type bar(&thing); } diff --git a/src/test/run-pass/issue-15673.rs b/src/test/run-pass/issue-15673.rs index 051d98aa1d89b..e66788a2c0037 100644 --- a/src/test/run-pass/issue-15673.rs +++ b/src/test/run-pass/issue-15673.rs @@ -10,6 +10,6 @@ use std::iter::AdditiveIterator; fn main() { - let x: [u64, ..3] = [1, 2, 3]; + let x: [u64; 3] = [1, 2, 3]; assert_eq!(6, range(0, 3).map(|i| x[i]).sum()); } diff --git a/src/test/run-pass/issue-17302.rs b/src/test/run-pass/issue-17302.rs index 50583c7d127d8..b2abf2d2b1a9c 100644 --- a/src/test/run-pass/issue-17302.rs +++ b/src/test/run-pass/issue-17302.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -static mut DROPPED: [bool, ..2] = [false, false]; +static mut DROPPED: [bool; 2] = [false, false]; struct A(uint); struct Foo { _a: A, _b: int } diff --git a/src/test/run-pass/issue-17877.rs b/src/test/run-pass/issue-17877.rs index 51db2f0595997..827e6a10abdfd 100644 --- a/src/test/run-pass/issue-17877.rs +++ b/src/test/run-pass/issue-17877.rs @@ -9,11 +9,11 @@ // except according to those terms. fn main() { - assert_eq!(match [0u8, ..1024] { + assert_eq!(match [0u8; 1024] { _ => 42u, }, 42u); - assert_eq!(match [0u8, ..1024] { + assert_eq!(match [0u8; 1024] { [1, _..] => 0u, [0, _..] => 1u, _ => 2u diff --git a/src/test/run-pass/issue-18425.rs b/src/test/run-pass/issue-18425.rs index 6bb244bf88f42..f61530c741851 100644 --- a/src/test/run-pass/issue-18425.rs +++ b/src/test/run-pass/issue-18425.rs @@ -12,5 +12,5 @@ // expression with a count of 1 and a non-Copy element type. 
fn main() { - let _ = [box 1u, ..1]; + let _ = [box 1u; 1]; } diff --git a/src/test/run-pass/issue-19244.rs b/src/test/run-pass/issue-19244.rs index d42bda6cd5d42..3ee5ce9bff93a 100644 --- a/src/test/run-pass/issue-19244.rs +++ b/src/test/run-pass/issue-19244.rs @@ -13,8 +13,8 @@ const STRUCT: MyStruct = MyStruct { field: 42 }; const TUP: (uint,) = (43,); fn main() { - let a = [0i, ..STRUCT.field]; - let b = [0i, ..TUP.0]; + let a = [0i; STRUCT.field]; + let b = [0i; TUP.0]; assert!(a.len() == 42); assert!(b.len() == 43); diff --git a/src/test/run-pass/issue-19340-1.rs b/src/test/run-pass/issue-19340-1.rs index b7a6391ee0475..2f466d4ca8c47 100644 --- a/src/test/run-pass/issue-19340-1.rs +++ b/src/test/run-pass/issue-19340-1.rs @@ -15,7 +15,7 @@ extern crate "issue-19340-1" as lib; use lib::Homura; fn main() { - let homura = Homura::Madoka { name: "Kaname".into_string() }; + let homura = Homura::Madoka { name: "Kaname".to_string() }; match homura { Homura::Madoka { name } => (), diff --git a/src/test/run-pass/issue-19340-2.rs b/src/test/run-pass/issue-19340-2.rs index 5179c1e2acb75..8300220edeaff 100644 --- a/src/test/run-pass/issue-19340-2.rs +++ b/src/test/run-pass/issue-19340-2.rs @@ -17,7 +17,7 @@ enum Homura { fn main() { let homura = Homura::Madoka { - name: "Akemi".into_string(), + name: "Akemi".to_string(), age: 14, }; diff --git a/src/test/run-pass/issue-19367.rs b/src/test/run-pass/issue-19367.rs index 3efc2ee50f358..7db84d518ff30 100644 --- a/src/test/run-pass/issue-19367.rs +++ b/src/test/run-pass/issue-19367.rs @@ -16,10 +16,10 @@ struct S { // on field of struct or tuple which we reassign in the match body. fn main() { - let mut a = (0i, Some("right".into_string())); + let mut a = (0i, Some("right".to_string())); let b = match a.1 { Some(v) => { - a.1 = Some("wrong".into_string()); + a.1 = Some("wrong".to_string()); v } None => String::new() @@ -28,10 +28,10 @@ fn main() { assert_eq!(b, "right"); - let mut s = S{ o: Some("right".into_string()) }; + let mut s = S{ o: Some("right".to_string()) }; let b = match s.o { Some(v) => { - s.o = Some("wrong".into_string()); + s.o = Some("wrong".to_string()); v } None => String::new(), diff --git a/src/test/run-pass/issue-19811-escape-unicode.rs b/src/test/run-pass/issue-19811-escape-unicode.rs new file mode 100644 index 0000000000000..23400859e54f1 --- /dev/null +++ b/src/test/run-pass/issue-19811-escape-unicode.rs @@ -0,0 +1,17 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let mut escaped = String::from_str(""); + for c in '\u{10401}'.escape_unicode() { + escaped.push(c); + } + assert_eq!("\\u{10401}", escaped); +} diff --git a/src/test/run-pass/issue-20091.rs b/src/test/run-pass/issue-20091.rs new file mode 100644 index 0000000000000..d653843ba0590 --- /dev/null +++ b/src/test/run-pass/issue-20091.rs @@ -0,0 +1,21 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// ignore-windows currently windows requires UTF-8 for spawning processes + +use std::io::Command; +use std::os; + +fn main() { + if os::args().len() == 1 { + assert!(Command::new(os::self_exe_name().unwrap()).arg(b"\xff") + .status().unwrap().success()) + } +} diff --git a/src/test/run-pass/issue-2904.rs b/src/test/run-pass/issue-2904.rs index 1dc1587ff2fd6..f87eb46d553b7 100644 --- a/src/test/run-pass/issue-2904.rs +++ b/src/test/run-pass/issue-2904.rs @@ -63,7 +63,7 @@ fn read_board_grid(mut input: rdr) -> Vec> { let mut input: &mut io::Reader = &mut input; let mut grid = Vec::new(); - let mut line = [0, ..10]; + let mut line = [0; 10]; input.read(&mut line); let mut row = Vec::new(); for c in line.iter() { diff --git a/src/test/run-pass/issue-3656.rs b/src/test/run-pass/issue-3656.rs index 53157ce7546ff..8a39676ca17ac 100644 --- a/src/test/run-pass/issue-3656.rs +++ b/src/test/run-pass/issue-3656.rs @@ -16,7 +16,7 @@ extern crate libc; use libc::{c_uint, uint32_t, c_void}; pub struct KEYGEN { - hash_algorithm: [c_uint, ..2], + hash_algorithm: [c_uint; 2], count: uint32_t, salt: *const c_void, salt_size: uint32_t, diff --git a/src/test/run-pass/issue-4387.rs b/src/test/run-pass/issue-4387.rs index 447bf3b4b267e..43948ef4a45c5 100644 --- a/src/test/run-pass/issue-4387.rs +++ b/src/test/run-pass/issue-4387.rs @@ -9,5 +9,5 @@ // except according to those terms. pub fn main() { - let _foo = [0i, ..2*4]; + let _foo = [0i; 2*4]; } diff --git a/src/test/run-pass/issue-5688.rs b/src/test/run-pass/issue-5688.rs index 0a13e001fabf5..7c8940aafbfbe 100644 --- a/src/test/run-pass/issue-5688.rs +++ b/src/test/run-pass/issue-5688.rs @@ -13,7 +13,7 @@ ...should print &[1, 2, 3] but instead prints something like &[4492532864, 24]. It is pretty evident that the compiler messed up -with the representation of [int, ..n] and [int] somehow, or at least +with the representation of [int; n] and [int] somehow, or at least failed to typecheck correctly. */ diff --git a/src/test/run-pass/issue-7784.rs b/src/test/run-pass/issue-7784.rs index 666847517efde..b936eb322fc5f 100644 --- a/src/test/run-pass/issue-7784.rs +++ b/src/test/run-pass/issue-7784.rs @@ -10,10 +10,10 @@ #![feature(advanced_slice_patterns)] -fn foo + Clone>([x, y, z]: [T, ..3]) -> (T, T, T) { +fn foo + Clone>([x, y, z]: [T; 3]) -> (T, T, T) { (x.clone(), x.clone() + y.clone(), x + y + z) } -fn bar(a: &'static str, b: &'static str) -> [&'static str, ..4] { +fn bar(a: &'static str, b: &'static str) -> [&'static str; 4] { [a, b, b, a] } diff --git a/src/test/run-pass/issue-9942.rs b/src/test/run-pass/issue-9942.rs index b9410ffdb43ec..321e22cd19c92 100644 --- a/src/test/run-pass/issue-9942.rs +++ b/src/test/run-pass/issue-9942.rs @@ -9,5 +9,5 @@ // except according to those terms. pub fn main() { - const S: uint = 23 as uint; [0i, ..S]; () + const S: uint = 23 as uint; [0i; S]; () } diff --git a/src/test/run-pass/macro-invocation-in-count-expr-fixed-array-type.rs b/src/test/run-pass/macro-invocation-in-count-expr-fixed-array-type.rs index 4c124d85eee3a..ecd7c0458f701 100644 --- a/src/test/run-pass/macro-invocation-in-count-expr-fixed-array-type.rs +++ b/src/test/run-pass/macro-invocation-in-count-expr-fixed-array-type.rs @@ -15,5 +15,5 @@ macro_rules! 
four ( ); fn main() { - let _x: [u16, ..four!()]; + let _x: [u16; four!()]; } diff --git a/src/test/run-pass/match-arm-statics.rs b/src/test/run-pass/match-arm-statics.rs index 400aab64b4cdb..db512adc01164 100644 --- a/src/test/run-pass/match-arm-statics.rs +++ b/src/test/run-pass/match-arm-statics.rs @@ -64,7 +64,7 @@ fn issue_6533() { } fn issue_13626() { - const VAL: [u8, ..1] = [0]; + const VAL: [u8; 1] = [0]; match [1] { VAL => unreachable!(), _ => () diff --git a/src/test/run-pass/method-mut-self-modifies-mut-slice-lvalue.rs b/src/test/run-pass/method-mut-self-modifies-mut-slice-lvalue.rs index 00319d57f8da6..9ae7f49c75a15 100644 --- a/src/test/run-pass/method-mut-self-modifies-mut-slice-lvalue.rs +++ b/src/test/run-pass/method-mut-self-modifies-mut-slice-lvalue.rs @@ -38,7 +38,7 @@ impl<'a> MyWriter for &'a mut [u8] { } fn main() { - let mut buf = [0_u8, .. 6]; + let mut buf = [0_u8; 6]; { let mut writer = buf.as_mut_slice(); diff --git a/src/test/run-pass/method-two-traits-distinguished-via-where-clause.rs b/src/test/run-pass/method-two-traits-distinguished-via-where-clause.rs index 986236fb6f9fd..fbecb6851b687 100644 --- a/src/test/run-pass/method-two-traits-distinguished-via-where-clause.rs +++ b/src/test/run-pass/method-two-traits-distinguished-via-where-clause.rs @@ -28,7 +28,7 @@ impl B for *const [T] { } fn main() { - let x: [int, ..4] = [1,2,3,4]; + let x: [int; 4] = [1,2,3,4]; let xptr = x.as_slice() as *const _; xptr.foo(); } diff --git a/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs b/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs index ef0bc75c32661..bf926a6c48ace 100644 --- a/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs +++ b/src/test/run-pass/mutability-inherits-through-fixed-length-vec.rs @@ -9,13 +9,13 @@ // except according to those terms. fn test1() { - let mut ints = [0i, ..32]; + let mut ints = [0i; 32]; ints[0] += 1; assert_eq!(ints[0], 1); } fn test2() { - let mut ints = [0i, ..32]; + let mut ints = [0i; 32]; for i in ints.iter_mut() { *i += 22; } for i in ints.iter() { assert!(*i == 22); } } diff --git a/src/test/run-pass/new-style-fixed-length-vec.rs b/src/test/run-pass/new-style-fixed-length-vec.rs index a689fb0cf7c2b..e06461daed0c9 100644 --- a/src/test/run-pass/new-style-fixed-length-vec.rs +++ b/src/test/run-pass/new-style-fixed-length-vec.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -static FOO: [int, ..3] = [1, 2, 3]; +static FOO: [int; 3] = [1, 2, 3]; pub fn main() { println!("{} {} {}", FOO[0], FOO[1], FOO[2]); diff --git a/src/test/run-pass/nullable-pointer-iotareduction.rs b/src/test/run-pass/nullable-pointer-iotareduction.rs index da1ad094df6a0..2660de619e9c7 100644 --- a/src/test/run-pass/nullable-pointer-iotareduction.rs +++ b/src/test/run-pass/nullable-pointer-iotareduction.rs @@ -20,7 +20,7 @@ use std::{option, mem}; // trying to get assert failure messages that at least identify which case // failed. -enum E { Thing(int, T), Nothing((), ((), ()), [i8, ..0]) } +enum E { Thing(int, T), Nothing((), ((), ()), [i8; 0]) } impl E { fn is_none(&self) -> bool { match *self { @@ -54,7 +54,7 @@ macro_rules! 
diff --git a/src/test/run-pass/nullable-pointer-iotareduction.rs b/src/test/run-pass/nullable-pointer-iotareduction.rs
index da1ad094df6a0..2660de619e9c7 100644
--- a/src/test/run-pass/nullable-pointer-iotareduction.rs
+++ b/src/test/run-pass/nullable-pointer-iotareduction.rs
@@ -20,7 +20,7 @@ use std::{option, mem};
 // trying to get assert failure messages that at least identify which case
 // failed.
 
-enum E<T> { Thing(int, T), Nothing((), ((), ()), [i8, ..0]) }
+enum E<T> { Thing(int, T), Nothing((), ((), ()), [i8; 0]) }
 impl<T> E<T> {
     fn is_none(&self) -> bool {
         match *self {
@@ -54,7 +54,7 @@ macro_rules! check_fancy {
         check_fancy!($e: $T, |ptr| assert!(*ptr == $e));
     }};
     ($e:expr: $T:ty, |$v:ident| $chk:expr) => {{
-        assert!(E::Nothing::<$T>((), ((), ()), [23i8, ..0]).is_none());
+        assert!(E::Nothing::<$T>((), ((), ()), [23i8; 0]).is_none());
         let e = $e;
         let t_ = E::Thing::<$T>(23, e);
         match t_.get_ref() {
diff --git a/src/test/run-pass/nullable-pointer-size.rs b/src/test/run-pass/nullable-pointer-size.rs
index 5708310abadbf..afc22be38b8a4 100644
--- a/src/test/run-pass/nullable-pointer-size.rs
+++ b/src/test/run-pass/nullable-pointer-size.rs
@@ -12,7 +12,7 @@
 
 use std::mem;
 
-enum E<T> { Thing(int, T), Nothing((), ((), ()), [i8, ..0]) }
+enum E<T> { Thing(int, T), Nothing((), ((), ()), [i8; 0]) }
 struct S<T>(int, T);
 
 // These are macros so we get useful assert messages.
diff --git a/src/test/run-pass/order-drop-with-match.rs b/src/test/run-pass/order-drop-with-match.rs
index 9a76beac9e5f7..a866be43a0514 100644
--- a/src/test/run-pass/order-drop-with-match.rs
+++ b/src/test/run-pass/order-drop-with-match.rs
@@ -14,7 +14,7 @@
 // in ORDER matching up to when it ran.
 // Correct order is: matched, inner, outer
 
-static mut ORDER: [uint, ..3] = [0, 0, 0];
+static mut ORDER: [uint; 3] = [0, 0, 0];
 static mut INDEX: uint = 0;
 
 struct A;
diff --git a/src/test/run-pass/out-of-stack-new-thread-no-split.rs b/src/test/run-pass/out-of-stack-new-thread-no-split.rs
index 419d9b5d824b1..674d0dc86da1c 100644
--- a/src/test/run-pass/out-of-stack-new-thread-no-split.rs
+++ b/src/test/run-pass/out-of-stack-new-thread-no-split.rs
@@ -27,7 +27,7 @@ pub fn black_box<T>(dummy: T) { unsafe { asm!("" : : "r"(&dummy)) } }
 
 #[no_stack_check]
 fn recurse() {
-    let buf = [0i, ..10];
+    let buf = [0i; 10];
     black_box(buf);
     recurse();
 }
diff --git a/src/test/run-pass/out-of-stack-no-split.rs b/src/test/run-pass/out-of-stack-no-split.rs
index ecb93cc6f8c7e..79926776abf4c 100644
--- a/src/test/run-pass/out-of-stack-no-split.rs
+++ b/src/test/run-pass/out-of-stack-no-split.rs
@@ -28,7 +28,7 @@ pub fn black_box<T>(dummy: T) { unsafe { asm!("" : : "r"(&dummy)) } }
 
 #[no_stack_check]
 fn recurse() {
-    let buf = [0i, ..10];
+    let buf = [0i; 10];
    black_box(buf);
     recurse();
 }
diff --git a/src/test/run-pass/out-of-stack.rs b/src/test/run-pass/out-of-stack.rs
index 81e75ba2cd53d..1594cca89e55b 100644
--- a/src/test/run-pass/out-of-stack.rs
+++ b/src/test/run-pass/out-of-stack.rs
@@ -22,7 +22,7 @@ use std::os;
 pub fn black_box<T>(dummy: T) { unsafe { asm!("" : : "r"(&dummy)) } }
 
 fn silent_recurse() {
-    let buf = [0i, ..1000];
+    let buf = [0i; 1000];
     black_box(buf);
     silent_recurse();
 }
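The two nullable-pointer tests above depend on the discriminant optimisation for enums whose "empty" variant carries only zero-sized data: wrapping a non-nullable pointer in such an enum costs no extra space. A hedged sketch of the property they check, written here with `Option` for simplicity (assumed behaviour of the 2014-era compiler, not code from the patch):

    use std::mem;

    fn main() {
        // The None case is encoded as the null pointer, so no separate tag is stored.
        assert_eq!(mem::size_of::<Option<Box<int>>>(),
                   mem::size_of::<Box<int>>());
    }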
diff --git a/src/test/run-pass/packed-struct-generic-layout.rs b/src/test/run-pass/packed-struct-generic-layout.rs
index 999e4aeeb5977..004a30220183e 100644
--- a/src/test/run-pass/packed-struct-generic-layout.rs
+++ b/src/test/run-pass/packed-struct-generic-layout.rs
@@ -20,7 +20,7 @@ struct S {
 pub fn main() {
     unsafe {
         let s = S { a: 0xff_ff_ff_ffu32, b: 1, c: 0xaa_aa_aa_aa as i32 };
-        let transd : [u8, .. 9] = mem::transmute(s);
+        let transd : [u8; 9] = mem::transmute(s);
         // Don't worry about endianness, the numbers are palindromic.
         assert!(transd ==
                 [0xff, 0xff, 0xff, 0xff,
@@ -29,7 +29,7 @@ pub fn main() {
 
         let s = S { a: 1u8, b: 2u8, c: 0b10000001_10000001 as i16};
-        let transd : [u8, .. 4] = mem::transmute(s);
+        let transd : [u8; 4] = mem::transmute(s);
         // Again, no endianness problems.
         assert!(transd ==
                 [1, 2, 0b10000001, 0b10000001]);
diff --git a/src/test/run-pass/packed-struct-layout.rs b/src/test/run-pass/packed-struct-layout.rs
index b4fbf0820cd46..9e94502a92a63 100644
--- a/src/test/run-pass/packed-struct-layout.rs
+++ b/src/test/run-pass/packed-struct-layout.rs
@@ -13,7 +13,7 @@ use std::mem;
 #[repr(packed)]
 struct S4 {
     a: u8,
-    b: [u8, .. 3],
+    b: [u8; 3],
 }
 
 #[repr(packed)]
@@ -25,11 +25,11 @@ struct S5 {
 pub fn main() {
     unsafe {
         let s4 = S4 { a: 1, b: [2,3,4] };
-        let transd : [u8, .. 4] = mem::transmute(s4);
+        let transd : [u8; 4] = mem::transmute(s4);
         assert!(transd == [1, 2, 3, 4]);
 
         let s5 = S5 { a: 1, b: 0xff_00_00_ff };
-        let transd : [u8, .. 5] = mem::transmute(s5);
+        let transd : [u8; 5] = mem::transmute(s5);
         // Don't worry about endianness, the u32 is palindromic.
         assert!(transd == [1, 0xff, 0, 0, 0xff]);
     }
diff --git a/src/test/run-pass/packed-struct-size.rs b/src/test/run-pass/packed-struct-size.rs
index 9472fd4ce38ed..846d51e2e7ecc 100644
--- a/src/test/run-pass/packed-struct-size.rs
+++ b/src/test/run-pass/packed-struct-size.rs
@@ -14,7 +14,7 @@ use std::mem;
 #[repr(packed)]
 struct S4 {
     a: u8,
-    b: [u8, .. 3],
+    b: [u8; 3],
 }
 
 #[repr(packed)]
diff --git a/src/test/run-pass/packed-struct-vec.rs b/src/test/run-pass/packed-struct-vec.rs
index 59bb5678b6936..d2121aa775216 100644
--- a/src/test/run-pass/packed-struct-vec.rs
+++ b/src/test/run-pass/packed-struct-vec.rs
@@ -22,9 +22,9 @@ struct Foo {
 impl Copy for Foo {}
 
 pub fn main() {
-    let foos = [Foo { bar: 1, baz: 2 }, .. 10];
+    let foos = [Foo { bar: 1, baz: 2 }; 10];
 
-    assert_eq!(mem::size_of::<[Foo, .. 10]>(), 90);
+    assert_eq!(mem::size_of::<[Foo; 10]>(), 90);
 
     for i in range(0u, 10) {
         assert_eq!(foos[i], Foo { bar: 1, baz: 2});
diff --git a/src/test/run-pass/packed-tuple-struct-layout.rs b/src/test/run-pass/packed-tuple-struct-layout.rs
index 5fb43503ccb26..c41d678b0f5b7 100644
--- a/src/test/run-pass/packed-tuple-struct-layout.rs
+++ b/src/test/run-pass/packed-tuple-struct-layout.rs
@@ -11,7 +11,7 @@
 use std::mem;
 
 #[repr(packed)]
-struct S4(u8,[u8, .. 3]);
+struct S4(u8,[u8; 3]);
 
 #[repr(packed)]
 struct S5(u8,u32);
@@ -19,11 +19,11 @@ struct S5(u8,u32);
 pub fn main() {
     unsafe {
         let s4 = S4(1, [2,3,4]);
-        let transd : [u8, .. 4] = mem::transmute(s4);
+        let transd : [u8; 4] = mem::transmute(s4);
         assert!(transd == [1, 2, 3, 4]);
 
         let s5 = S5(1, 0xff_00_00_ff);
-        let transd : [u8, .. 5] = mem::transmute(s5);
+        let transd : [u8; 5] = mem::transmute(s5);
         // Don't worry about endianness, the u32 is palindromic.
         assert!(transd == [1, 0xff, 0, 0, 0xff]);
     }
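The packed-struct hunks in this group all transmute a `#[repr(packed)]` value into a fixed-length byte array to pin down its layout. A minimal sketch of the idea, with illustrative types that are not part of the patch:

    use std::mem;

    #[repr(packed)]
    struct Packed {
        a: u8,
        b: u32,
    }

    fn main() {
        // Without padding the struct is exactly 1 + 4 bytes.
        assert_eq!(mem::size_of::<Packed>(), 5);

        let p = Packed { a: 1, b: 0xff_00_00_ff };
        let bytes: [u8; 5] = unsafe { mem::transmute(p) };
        // The u32 value is palindromic, so endianness does not matter.
        assert!(bytes == [1, 0xff, 0, 0, 0xff]);
    }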
diff --git a/src/test/run-pass/packed-tuple-struct-size.rs b/src/test/run-pass/packed-tuple-struct-size.rs
index 8967b07ca8823..a0b88ea53c5a4 100644
--- a/src/test/run-pass/packed-tuple-struct-size.rs
+++ b/src/test/run-pass/packed-tuple-struct-size.rs
@@ -12,7 +12,7 @@ use std::mem;
 
 #[repr(packed)]
-struct S4(u8,[u8, .. 3]);
+struct S4(u8,[u8; 3]);
 
 #[repr(packed)]
 struct S5(u8, u32);
diff --git a/src/test/run-pass/regions-dependent-addr-of.rs b/src/test/run-pass/regions-dependent-addr-of.rs
index 79f8ca48882cf..41396ef01bee8 100644
--- a/src/test/run-pass/regions-dependent-addr-of.rs
+++ b/src/test/run-pass/regions-dependent-addr-of.rs
@@ -18,7 +18,7 @@ struct A {
 
 struct B {
     v1: int,
-    v2: [int, ..3],
+    v2: [int; 3],
     v3: Vec<int> ,
     v4: C,
     v5: Box<C>,
diff --git a/src/test/run-pass/repeat-expr-in-static.rs b/src/test/run-pass/repeat-expr-in-static.rs
index 9955673bb0b12..a53f1da4ce605 100644
--- a/src/test/run-pass/repeat-expr-in-static.rs
+++ b/src/test/run-pass/repeat-expr-in-static.rs
@@ -8,8 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-static FOO: [int, ..4] = [32, ..4];
-static BAR: [int, ..4] = [32, 32, 32, 32];
+static FOO: [int; 4] = [32; 4];
+static BAR: [int; 4] = [32, 32, 32, 32];
 
 pub fn main() {
     assert!(FOO == BAR);
diff --git a/src/test/run-pass/repeated-vector-syntax.rs b/src/test/run-pass/repeated-vector-syntax.rs
index 9c369c0d770c6..0781822cb7482 100644
--- a/src/test/run-pass/repeated-vector-syntax.rs
+++ b/src/test/run-pass/repeated-vector-syntax.rs
@@ -11,8 +11,8 @@
 #![feature(slicing_syntax)]
 
 pub fn main() {
-    let x = [ [true], ..512 ];
-    let y = [ 0i, ..1 ];
+    let x = [ [true]; 512 ];
+    let y = [ 0i; 1 ];
 
     print!("[");
     for xi in x.iter() {
diff --git a/src/test/run-pass/self-impl.rs b/src/test/run-pass/self-impl.rs
new file mode 100644
index 0000000000000..3ece042aef053
--- /dev/null
+++ b/src/test/run-pass/self-impl.rs
@@ -0,0 +1,42 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that we can use `Self` types in impls in the expected way.
+
+struct Foo;
+
+// Test uses on inherent impl.
+impl Foo {
+    fn foo(_x: Self, _y: &Self, _z: Box<Self>) -> Self {
+        Foo
+    }
+}
+
+// Test uses when implementing a trait and with a type parameter.
+pub struct Baz<X> {
+    pub f: X,
+}
+
+trait Bar {
+    fn bar(x: Self, y: &Self, z: Box<Self>) -> Self;
+}
+
+impl Bar for Box<Baz<int>> {
+    fn bar(_x: Self, _y: &Self, _z: Box<Self>) -> Self {
+        box Baz { f: 42 }
+    }
+}
+
+fn main() {
+    let _: Foo = Foo::foo(Foo, &Foo, box Foo);
+    let _: Box<Baz<int>> = Bar::bar(box Baz { f: 42 },
+                                    &box Baz { f: 42 },
+                                    box box Baz { f: 42 });
+}
diff --git a/src/test/run-pass/uninit-empty-types.rs b/src/test/run-pass/uninit-empty-types.rs
index 005205353fce6..c2bd738b8a46b 100644
--- a/src/test/run-pass/uninit-empty-types.rs
+++ b/src/test/run-pass/uninit-empty-types.rs
@@ -18,6 +18,6 @@ struct Foo;
 pub fn main() {
     unsafe {
         let _x: Foo = mem::uninitialized();
-        let _x: [Foo, ..2] = mem::uninitialized();
+        let _x: [Foo; 2] = mem::uninitialized();
     }
 }
diff --git a/src/test/run-pass/unsized3.rs b/src/test/run-pass/unsized3.rs
index e5e6ce6e76bb6..271f5817c9e79 100644
--- a/src/test/run-pass/unsized3.rs
+++ b/src/test/run-pass/unsized3.rs
@@ -60,7 +60,7 @@
 pub fn main() {
     unsafe {
         struct Foo_<T> {
-            f: [T, ..3]
+            f: [T; 3]
         }
 
         let data = box Foo_{f: [1i32, 2, 3] };
@@ -72,7 +72,7 @@
 
         struct Baz_ {
             f1: uint,
-            f2: [u8, ..5],
+            f2: [u8; 5],
         }
 
         let data = box Baz_{ f1: 42, f2: ['a' as u8, 'b' as u8, 'c' as u8, 'd' as u8, 'e' as u8] };
diff --git a/src/test/run-pass/variadic-ffi.rs b/src/test/run-pass/variadic-ffi.rs
index aa71de2123c7d..f8eef98856114 100644
--- a/src/test/run-pass/variadic-ffi.rs
+++ b/src/test/run-pass/variadic-ffi.rs
@@ -19,7 +19,7 @@ extern {
 }
 
 unsafe fn check<T>(expected: &str, f: |*mut c_char| -> T) {
-    let mut x = [0 as c_char, ..50];
+    let mut x = [0 as c_char; 50];
     f(&mut x[0] as *mut c_char);
     let res = CString::new(&x[0], false);
     assert_eq!(expected, res.as_str().unwrap());
diff --git a/src/test/run-pass/vec-dst.rs b/src/test/run-pass/vec-dst.rs
index d8bf0a5c627f6..4a36231e72b0a 100644
--- a/src/test/run-pass/vec-dst.rs
+++ b/src/test/run-pass/vec-dst.rs
@@ -9,9 +9,9 @@
 // except according to those terms.
 
 pub fn main() {
-    // Tests for indexing into box/& [T, ..n]
-    let x: [int, ..3] = [1, 2, 3];
-    let mut x: Box<[int, ..3]> = box x;
+    // Tests for indexing into box/& [T; n]
+    let x: [int; 3] = [1, 2, 3];
+    let mut x: Box<[int; 3]> = box x;
     assert!(x[0] == 1);
     assert!(x[1] == 2);
     assert!(x[2] == 3);
@@ -20,8 +20,8 @@ pub fn main() {
     assert!(x[1] == 45);
     assert!(x[2] == 3);
 
-    let mut x: [int, ..3] = [1, 2, 3];
-    let x: &mut [int, ..3] = &mut x;
+    let mut x: [int; 3] = [1, 2, 3];
+    let x: &mut [int; 3] = &mut x;
     assert!(x[0] == 1);
     assert!(x[1] == 2);
     assert!(x[2] == 3);
diff --git a/src/test/run-pass/vec-fixed-length.rs b/src/test/run-pass/vec-fixed-length.rs
index 05a7388b5e2c4..20e1becd008a5 100644
--- a/src/test/run-pass/vec-fixed-length.rs
+++ b/src/test/run-pass/vec-fixed-length.rs
@@ -11,17 +11,17 @@
 use std::mem::size_of;
 
 pub fn main() {
-    let x: [int, ..4] = [1, 2, 3, 4];
+    let x: [int; 4] = [1, 2, 3, 4];
     assert_eq!(x[0], 1);
     assert_eq!(x[1], 2);
     assert_eq!(x[2], 3);
     assert_eq!(x[3], 4);
 
-    assert_eq!(size_of::<[u8, ..4]>(), 4u);
+    assert_eq!(size_of::<[u8; 4]>(), 4u);
 
     // FIXME #10183
     // FIXME #18069
     //if cfg!(target_word_size = "64") {
-    //    assert_eq!(size_of::<[u8, ..(1 << 32)]>(), (1u << 32));
+    //    assert_eq!(size_of::<[u8; (1 << 32)]>(), (1u << 32));
     //}
 }
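The new self-impl.rs test above exercises `Self` in impl headers and method signatures. A short sketch of the same idea in isolation, assuming the `Self`-in-impl support this test introduces; the names are illustrative and not part of the test suite:

    struct Counter {
        n: uint,
    }

    impl Counter {
        fn new() -> Self {                 // `Self` as a return type
            Counter { n: 0 }
        }
        fn boxed(self) -> Box<Self> {      // `Self` inside a type constructor
            box self
        }
    }

    fn main() {
        let c = Counter::new().boxed();
        assert!(c.n == 0);
    }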
diff --git a/src/test/run-pass/vec-repeat-with-cast.rs b/src/test/run-pass/vec-repeat-with-cast.rs
index 18ccd8c96ab6c..97a443cb3b8e0 100644
--- a/src/test/run-pass/vec-repeat-with-cast.rs
+++ b/src/test/run-pass/vec-repeat-with-cast.rs
@@ -8,4 +8,4 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-pub fn main() { let _a = [0i, ..1 as uint]; }
+pub fn main() { let _a = [0i; 1 as uint]; }
diff --git a/src/test/run-pass/vector-sort-panic-safe.rs b/src/test/run-pass/vector-sort-panic-safe.rs
index c969e66957ccb..fe89c7532eebc 100644
--- a/src/test/run-pass/vector-sort-panic-safe.rs
+++ b/src/test/run-pass/vector-sort-panic-safe.rs
@@ -14,7 +14,7 @@ use std::rand::{task_rng, Rng, Rand};
 
 const REPEATS: uint = 5;
 const MAX_LEN: uint = 32;
-static drop_counts: [AtomicUint, .. MAX_LEN] =
+static drop_counts: [AtomicUint; MAX_LEN] =
     // FIXME #5244: AtomicUint is not Copy.
     [
         INIT_ATOMIC_UINT, INIT_ATOMIC_UINT, INIT_ATOMIC_UINT, INIT_ATOMIC_UINT,
diff --git a/src/test/run-pass/where-clause-early-bound-lifetimes.rs b/src/test/run-pass/where-clause-early-bound-lifetimes.rs
new file mode 100644
index 0000000000000..cade99b83a2fe
--- /dev/null
+++ b/src/test/run-pass/where-clause-early-bound-lifetimes.rs
@@ -0,0 +1,23 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait TheTrait { }
+
+impl TheTrait for &'static int { }
+
+fn foo<'a,T>(_: &'a T) where &'a T : TheTrait { }
+
+fn bar<T>(_: &'static T) where &'static T : TheTrait { }
+
+fn main() {
+    static x: int = 1;
+    foo(&x);
+    bar(&x);
+}
diff --git a/src/test/run-pass/where-clause-method-substituion.rs b/src/test/run-pass/where-clause-method-substituion.rs
new file mode 100644
index 0000000000000..b391df8500bb7
--- /dev/null
+++ b/src/test/run-pass/where-clause-method-substituion.rs
@@ -0,0 +1,30 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo<T> {}
+
+trait Bar<A> {
+    fn method<B>(&self) where A: Foo<B>;
+}
+
+struct S;
+struct X;
+
+impl Foo<S> for X {}
+
+impl Bar<X> for int {
+    fn method<B>(&self) where X: Foo<B> {
+    }
+}
+
+fn main() {
+    1.method::<S>();
+}
+
diff --git a/src/test/run-pass/where-clause-region-outlives.rs b/src/test/run-pass/where-clause-region-outlives.rs
new file mode 100644
index 0000000000000..1ecb4b6c2dc8f
--- /dev/null
+++ b/src/test/run-pass/where-clause-region-outlives.rs
@@ -0,0 +1,17 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct A<'a, 'b> where 'a : 'b { x: &'a int, y: &'b int }
+
+fn main() {
+    let x = 1i;
+    let y = 1i;
+    let a = A { x: &x, y: &y };
+}
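The three new where-clause tests above put bounds on things other than a bare type parameter: a reference type `&'a T`, an outlives relation `'a : 'b`, and a bound whose substitution happens at the method level. A compact sketch of a bound placed on a reference type rather than on `T` itself, assuming the where-clause support these tests introduce; the names are illustrative only:

    trait Describe {
        fn describe(&self) -> String;
    }

    impl<'a> Describe for &'a int {
        fn describe(&self) -> String {
            format!("a reference to {}", **self)
        }
    }

    // The bound constrains `&'a T`, not `T`.
    fn show<'a, T>(x: &'a T) -> String where &'a T : Describe {
        x.describe()
    }

    fn main() {
        let n = 5i;
        println!("{}", show(&n));
    }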
diff --git a/src/test/run-pass/where-clauses-method.rs b/src/test/run-pass/where-clauses-method.rs
new file mode 100644
index 0000000000000..2b87bcd4b3979
--- /dev/null
+++ b/src/test/run-pass/where-clauses-method.rs
@@ -0,0 +1,29 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that a where clause attached to a method allows us to add
+// additional constraints to a parameter out of scope.
+
+struct Foo<T> {
+    value: T
+}
+
+impl<T> Foo<T> {
+    fn equals(&self, u: &Foo<T>) -> bool where T : Eq {
+        self.value == u.value
+    }
+}
+
+fn main() {
+    let x = Foo { value: 1i };
+    let y = Foo { value: 2i };
+    println!("{}", x.equals(&x));
+    println!("{}", x.equals(&y));
+}
diff --git a/src/test/compile-fail/trailing-comma-array-repeat.rs b/src/test/run-pass/where-clauses-not-parameter.rs
similarity index 85%
rename from src/test/compile-fail/trailing-comma-array-repeat.rs
rename to src/test/run-pass/where-clauses-not-parameter.rs
index dadd657158384..bc5fc388ca1ea 100644
--- a/src/test/compile-fail/trailing-comma-array-repeat.rs
+++ b/src/test/run-pass/where-clauses-not-parameter.rs
@@ -8,6 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+fn equal<T>(_: &T, _: &T) -> bool where int : Eq {
+    true
+}
+
 fn main() {
-    let [_, ..,] = [(), ()]; //~ ERROR unexpected token: `]`
+    equal(&0i, &0i);
 }
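The where-clauses-method.rs and where-clauses-not-parameter.rs tests above show where clauses attached to a method rather than to the impl, and bounds on types that are not parameters at all. A minimal sketch of the practical payoff, a method that only requires `PartialEq` when it is actually called; the types are illustrative and not taken from the test suite:

    struct Pair<T> {
        a: T,
        b: T,
    }

    impl<T> Pair<T> {
        // The impl places no bounds on T; only this method does.
        fn same(&self) -> bool where T : PartialEq {
            self.a == self.b
        }
    }

    fn main() {
        let p = Pair { a: 1i, b: 1i };
        assert!(p.same());
    }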