 -rw-r--r--  .github/actions/github-release/Dockerfile                 |   8
 -rw-r--r--  .github/actions/github-release/README.md                  |  21
 -rw-r--r--  .github/actions/github-release/action.yml                 |  15
 -rw-r--r--  .github/actions/github-release/main.js                    | 117
 -rw-r--r--  .github/actions/github-release/package.json               |  10
 -rw-r--r--  .github/workflows/ci.yaml                                 | 149
 -rw-r--r--  .github/workflows/release.yaml                            | 265
 -rw-r--r--  .github/workflows/rustdoc.yaml                            |  42
 -rw-r--r--  .gitignore                                                |   1
 -rw-r--r--  Cargo.lock                                                |  12
 -rw-r--r--  crates/ra_assists/src/handlers/inline_local_variable.rs   |  29
 -rw-r--r--  crates/ra_assists/src/marks.rs                            |   5
 -rw-r--r--  crates/ra_hir/src/lib.rs                                  |   3
 -rw-r--r--  crates/ra_hir/src/semantics.rs                            |  25
 -rw-r--r--  crates/ra_hir/src/source_analyzer.rs                      |  88
 -rw-r--r--  docs/user/features.md                                     |   7
 -rw-r--r--  xtask/src/dist.rs                                         |  79
 -rw-r--r--  xtask/src/install.rs                                      |   6
 -rw-r--r--  xtask/src/lib.rs                                          |   2
 -rw-r--r--  xtask/src/main.rs                                         |  15
 -rw-r--r--  xtask/src/not_bash.rs                                     |  17
 21 files changed, 529 insertions(+), 387 deletions(-)
diff --git a/.github/actions/github-release/Dockerfile b/.github/actions/github-release/Dockerfile
new file mode 100644
index 000000000..5849eac7d
--- /dev/null
+++ b/.github/actions/github-release/Dockerfile
@@ -0,0 +1,8 @@
+FROM node:slim
+
+COPY . /action
+WORKDIR /action
+
+RUN npm install --production
+
+ENTRYPOINT ["node", "/action/main.js"]
diff --git a/.github/actions/github-release/README.md b/.github/actions/github-release/README.md
new file mode 100644
index 000000000..7b50d0020
--- /dev/null
+++ b/.github/actions/github-release/README.md
@@ -0,0 +1,21 @@
+# github-release
+
+Copy-pasted from
+https://github.com/bytecodealliance/wasmtime/tree/8acfdbdd8aa550d1b84e0ce1e6222a6605d14e38/.github/actions/github-release
+
+An action used to publish GitHub releases for `wasmtime`.
+
+As of the time of this writing there's a few actions floating around which
+perform github releases but they all tend to have their set of drawbacks.
+Additionally nothing handles deleting releases which we need for our rolling
+`dev` release.
+
+To handle all this this action rolls-its-own implementation using the
+actions/toolkit repository and packages published there. These run in a Docker
+container and take various inputs to orchestrate the release from the build.
+
+More comments can be found in `main.js`.
+
+Testing this is really hard. If you want to try though run `npm install` and
+then `node main.js`. You'll have to configure a bunch of env vars though to get
+anything reasonably working.
diff --git a/.github/actions/github-release/action.yml b/.github/actions/github-release/action.yml
new file mode 100644
index 000000000..51a074adf
--- /dev/null
+++ b/.github/actions/github-release/action.yml
@@ -0,0 +1,15 @@
+name: 'wasmtime github releases'
+description: 'wasmtime github releases'
+inputs:
+  token:
+    description: ''
+    required: true
+  name:
+    description: ''
+    required: true
+  files:
+    description: ''
+    required: true
+runs:
+  using: 'docker'
+  image: 'Dockerfile'
diff --git a/.github/actions/github-release/main.js b/.github/actions/github-release/main.js
new file mode 100644
index 000000000..295c02626
--- /dev/null
+++ b/.github/actions/github-release/main.js
@@ -0,0 +1,117 @@
+const core = require('@actions/core');
+const path = require("path");
+const fs = require("fs");
+const github = require('@actions/github');
+const glob = require('glob');
+
+function sleep(milliseconds) {
+  return new Promise(resolve => setTimeout(resolve, milliseconds))
+}
+
+async function runOnce() {
+  // Load all our inputs and env vars. Note that `getInput` reads from `INPUT_*`
+  const files = core.getInput('files');
+  const name = core.getInput('name');
+  const token = core.getInput('token');
+  const slug = process.env.GITHUB_REPOSITORY;
+  const owner = slug.split('/')[0];
+  const repo = slug.split('/')[1];
+  const sha = process.env.GITHUB_SHA;
+
+  core.info(`files: ${files}`);
+  core.info(`name: ${name}`);
+  core.info(`token: ${token}`);
+
+  const octokit = new github.GitHub(token);
+
+  // Delete the previous release since we can't overwrite one. This may happen
+  // due to retrying an upload or it may happen because we're doing the dev
+  // release.
+  const releases = await octokit.paginate("GET /repos/:owner/:repo/releases", { owner, repo });
+  for (const release of releases) {
+    if (release.tag_name !== name) {
+      continue;
+    }
+    const release_id = release.id;
+    core.info(`deleting release ${release_id}`);
+    await octokit.repos.deleteRelease({ owner, repo, release_id });
+  }
+
+  // We also need to update the `dev` tag while we're at it on the `dev` branch.
+  if (name == 'nightly') {
+    try {
+      core.info(`updating nightly tag`);
+      await octokit.git.updateRef({
+        owner,
+        repo,
+        ref: 'tags/nightly',
+        sha,
+        force: true,
+      });
+    } catch (e) {
+      console.log("ERROR: ", JSON.stringify(e, null, 2));
+      core.info(`creating nightly tag`);
+      await octokit.git.createTag({
+        owner,
+        repo,
+        tag: 'nightly',
+        message: 'nightly release',
+        object: sha,
+        type: 'commit',
+      });
+    }
+  }
+
+  // Creates an official GitHub release for this `tag`, and if this is `dev`
+  // then we know that from the previous block this should be a fresh release.
+  core.info(`creating a release`);
+  const release = await octokit.repos.createRelease({
+    owner,
+    repo,
+    tag_name: name,
+    prerelease: name === 'nightly',
+  });
+
+  // Upload all the relevant assets for this release as just general blobs.
+  for (const file of glob.sync(files)) {
+    const size = fs.statSync(file).size;
+    core.info(`upload ${file}`);
+    await octokit.repos.uploadReleaseAsset({
+      data: fs.createReadStream(file),
+      headers: { 'content-length': size, 'content-type': 'application/octet-stream' },
+      name: path.basename(file),
+      url: release.data.upload_url,
+    });
+  }
+}
+
+async function run() {
+  const retries = 10;
+  for (let i = 0; i < retries; i++) {
+    try {
+      await runOnce();
+      break;
+    } catch (e) {
+      if (i === retries - 1)
+        throw e;
+      logError(e);
+      console.log("RETRYING after 10s");
+      await sleep(10000)
+    }
+  }
+}
+
+function logError(e) {
+  console.log("ERROR: ", e.message);
+  try {
+    console.log(JSON.stringify(e, null, 2));
+  } catch (e) {
+    // ignore json errors for now
+  }
+  console.log(e.stack);
+}
+
+run().catch(err => {
+  logError(err);
+  core.setFailed(err.message);
+});
diff --git a/.github/actions/github-release/package.json b/.github/actions/github-release/package.json
new file mode 100644
index 000000000..abfc55f6f
--- /dev/null
+++ b/.github/actions/github-release/package.json
@@ -0,0 +1,10 @@
+{
+  "name": "wasmtime-github-release",
+  "version": "0.0.0",
+  "main": "main.js",
+  "dependencies": {
+    "@actions/core": "^1.0.0",
+    "@actions/github": "^1.0.0",
+    "glob": "^7.1.5"
+  }
+}
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 50c4265cf..633015956 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -3,28 +3,30 @@ on:
   pull_request:
   push:
     branches:
     - master
     - staging
     - trying
 
 jobs:
   rust-audit:
     name: Audit Rust vulnerabilities
     runs-on: ubuntu-latest
     steps:
     - name: Checkout repository
       uses: actions/checkout@v1
 
     - run: cargo install cargo-audit
     - run: cargo audit
 
   rust:
     name: Rust
     runs-on: ${{ matrix.os }}
+
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, windows-latest, macos-latest]
+
     env:
       RUSTFLAGS: -D warnings
       CC: deny_c
@@ -32,62 +34,57 @@ jobs:
       RUN_SLOW_TESTS: 1
       RUSTUP_MAX_RETRIES: 10
       CARGO_NET_RETRY: 10
-    steps:
 
-    - name: Checkout repository
-      uses: actions/checkout@v1
-
-    # We need to disable the existing toolchain to avoid updating rust-docs
-    # which takes a long time. The fastest way to do this is to rename the
-    # existing folder, as deleting it takes about as much time as not doing
-    # anything and just updating rust-docs.
-    - name: Rename existing rust toolchain (Windows)
-      if: matrix.os == 'windows-latest'
-      run: Rename-Item C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc.old
-
-    - name: Install Rust toolchain
-      uses: actions-rs/toolchain@v1
-      with:
-        toolchain: stable
-        profile: minimal
-        override: true
-        components: rustfmt, rust-src
-
-    - name: Cache cargo registry
-      uses: actions/cache@v1
-      with:
-        path: ~/.cargo/registry
-        key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
-
-    - name: Cache cargo index
-      uses: actions/cache@v1
-      with:
-        path: ~/.cargo/git
-        key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-
-    - name: Cache cargo target dir
-      uses: actions/cache@v1
-      with:
-        path: target
-        key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
-
-    - name: Compile
-      uses: actions-rs/cargo@v1
-      with:
-        command: test
-        args: --no-run
-
-    - name: Test
-      uses: actions-rs/cargo@v1
-      with:
-        command: test
-
-    - name: Prepare cache
-      run: cargo xtask pre-cache
-
-    - name: Prepare cache 2
-      if: matrix.os == 'windows-latest'
-      run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v1
+
+    # We need to disable the existing toolchain to avoid updating rust-docs
+    # which takes a long time. The fastest way to do this is to rename the
+    # existing folder, as deleting it takes about as much time as not doing
+    # anything and just updating rust-docs.
+    - name: Rename existing rust toolchain (Windows)
+      if: matrix.os == 'windows-latest'
+      run: Rename-Item C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc.old
+
+    - name: Install Rust toolchain
+      uses: actions-rs/toolchain@v1
+      with:
+        toolchain: stable
+        profile: minimal
+        override: true
+        components: rustfmt, rust-src
+
+    - name: Cache cargo registry
+      uses: actions/cache@v1
+      with:
+        path: ~/.cargo/registry
+        key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+
+    - name: Cache cargo index
+      uses: actions/cache@v1
+      with:
+        path: ~/.cargo/git
+        key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+
+    - name: Cache cargo target dir
+      uses: actions/cache@v1
+      with:
+        path: target
+        key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+
+    - name: Compile
+      run: cargo test --no-run
+
+    - name: Test
+      run: cargo test
+
+    - name: Prepare cache
+      run: cargo xtask pre-cache
+
+    - name: Prepare cache 2
+      if: matrix.os == 'windows-latest'
+      run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe
 
   typescript:
     name: TypeScript
@@ -96,22 +93,22 @@ jobs:
       CXX: g++-4.9
       CC: gcc-4.9
     steps:
     - name: Checkout repository
       uses: actions/checkout@v1
 
     - name: Install Nodejs
       uses: actions/setup-node@v1
       with:
         node-version: 12.x
 
     - run: npm ci
       working-directory: ./editors/code
 
     - run: npm audit
       working-directory: ./editors/code
 
     - run: npm run lint
       working-directory: ./editors/code
 
     - run: npm run package --scripts-prepend-node-path
       working-directory: ./editors/code
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index a697c0071..8100c04ef 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -1,193 +1,110 @@
 name: release
 on:
+  schedule:
+  - cron: '*/15 * * * *' # midnight UTC
+
   push:
     branches:
     - release
 
 jobs:
-  build-server:
-    name: build-server
+  dist:
+    name: dist
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest, macos-latest]
+
     env:
       RUSTFLAGS: -D warnings
       CARGO_INCREMENTAL: 0
       RUSTUP_MAX_RETRIES: 10
       CARGO_NET_RETRY: 10
-    steps:
 
-    - name: Checkout repository
-      uses: actions/checkout@v1
-
-    # We need to disable the existing toolchain to avoid updating rust-docs
-    # which takes a long time. The fastest way to do this is to rename the
-    # existing folder, as deleting it takes about as much time as not doing
-    # anything and just updating rust-docs.
-    - name: Rename existing rust toolchain (Windows)
-      if: matrix.os == 'windows-latest'
-      run: Rename-Item C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc.old
-
-    - name: Install Rust toolchain
-      uses: actions-rs/toolchain@v1
-      with:
-        toolchain: stable
-        profile: minimal
-        target: x86_64-unknown-linux-musl
-        override: true
-
-    - name: Build
-      if: matrix.os == 'ubuntu-latest'
-      uses: actions-rs/cargo@v1
-      env:
-        CC: clang
-      with:
-        command: build
-        args: --package rust-analyzer --bin rust-analyzer --release --target x86_64-unknown-linux-musl
-
-    - name: Build
-      if: matrix.os != 'ubuntu-latest'
-      uses: actions-rs/cargo@v1
-      with:
-        command: build
-        args: --package rust-analyzer --bin rust-analyzer --release
-
-    - name: Create distribution dir
-      run: mkdir ./dist
-
-    - name: Copy binary
-      if: matrix.os == 'ubuntu-latest'
-      run: cp ./target/x86_64-unknown-linux-musl/release/rust-analyzer ./dist/rust-analyzer-linux && strip ./dist/rust-analyzer-linux
-
-    - name: Copy binary
-      if: matrix.os == 'macos-latest'
-      run: cp ./target/release/rust-analyzer ./dist/rust-analyzer-mac
-
-    - name: Copy binary
-      if: matrix.os == 'windows-latest'
-      run: copy ./target/release/rust-analyzer.exe ./dist/rust-analyzer-windows.exe
-
-    - name: Upload artifacts
-      uses: actions/upload-artifact@v1
-      with:
-        name: server-${{ matrix.os }}
-        path: ./dist
-
-  build-clients:
-    name: build-clients
-    runs-on: ubuntu-latest
     steps:
     - name: Checkout repository
       uses: actions/checkout@v1
 
-    - name: Install Nodejs
-      uses: actions/setup-node@v1
-      with:
-        node-version: 12.x
-
-    - run: npm ci
-      working-directory: ./editors/code
+    # We need to disable the existing toolchain to avoid updating rust-docs
+    # which takes a long time. The fastest way to do this is to rename the
+    # existing folder, as deleting it takes about as much time as not doing
+    # anything and just updating rust-docs.
+    - name: Rename existing rust toolchain (Windows)
+      if: matrix.os == 'windows-latest'
+      run: Rename-Item C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc C:\Users\runneradmin\.rustup\toolchains\stable-x86_64-pc-windows-msvc.old
 
-    - run: npm run package --scripts-prepend-node-path
-      working-directory: ./editors/code
-
-    - name: Copy vscode extension
-      run: mkdir -p ./dist/code && cp ./editors/code/rust-analyzer.vsix ./dist/
-
-    - name: Upload artifacts
-      uses: actions/upload-artifact@v1
-      with:
-        name: editor-plugins
-        path: ./dist
-
-  make-release:
-    name: make-release
+    - name: Install Rust toolchain
+      uses: actions-rs/toolchain@v1
+      with:
+        toolchain: stable
+        profile: minimal
+        target: x86_64-unknown-linux-musl
+        override: true
+
+    - name: Install Nodejs
+      uses: actions/setup-node@v1
+      with:
+        node-version: 12.x
+
+    - name: Dist
+      if: github.event_name == 'push'
+      run: cargo xtask dist
+
+    - name: Dist
+      if: github.event_name != 'push'
+      run: cargo xtask dist --nightly
+
+    - name: Upload artifacts
+      uses: actions/upload-artifact@v1
+      with:
+        name: dist-${{ matrix.os }}
+        path: ./dist
+
+  publish:
+    name: publish
     runs-on: ubuntu-latest
-    needs: ['build-server', 'build-clients']
+    needs: ['dist']
     steps:
     - name: Install Nodejs
       uses: actions/setup-node@v1
       with:
         node-version: 12.x
 
     - run: echo "::set-env name=TAG::$(date --iso)"
-    - run: 'echo "TAG: $TAG"'
-
-    - name: Checkout repository
-      uses: actions/checkout@v1
+      if: github.event_name == 'push'
+    - run: echo "::set-env name=TAG::nightly"
+      if: github.event_name == 'schedule'
+    - run: 'echo "TAG: $TAG"'
 
-    - uses: actions/download-artifact@v1
-      with:
-        name: editor-plugins
-        path: dist
-    - uses: actions/download-artifact@v1
-      with:
-        name: server-macos-latest
-        path: dist
-    - uses: actions/download-artifact@v1
-      with:
-        name: server-ubuntu-latest
-        path: dist
-    - uses: actions/download-artifact@v1
-      with:
-        name: server-windows-latest
-        path: dist
-    - run: ls -all ./dist
-
-    - name: Create Release
-      id: create_release
-      # uses: actions/create-release@v1
-      # https://github.com/actions/create-release/pull/32
-      uses: fleskesvor/create-release@1a72e235c178bf2ae6c51a8ae36febc24568c5fe
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      with:
-        tag_name: ${{ env.TAG }}
-        release_name: ${{ env.TAG }}
-        draft: false
-        prerelease: false
-
-    - uses: actions/[email protected]
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      with:
-        upload_url: ${{ steps.create_release.outputs.upload_url }}
-        asset_path: ./dist/rust-analyzer-linux
-        asset_name: rust-analyzer-linux
-        asset_content_type: application/octet-stream
-
-    - uses: actions/[email protected]
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      with:
-        upload_url: ${{ steps.create_release.outputs.upload_url }}
-        asset_path: ./dist/rust-analyzer-mac
-        asset_name: rust-analyzer-mac
-        asset_content_type: application/octet-stream
-
-    - uses: actions/[email protected]
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      with:
-        upload_url: ${{ steps.create_release.outputs.upload_url }}
-        asset_path: ./dist/rust-analyzer-windows.exe
-        asset_name: rust-analyzer-windows.exe
-        asset_content_type: application/octet-stream
-
-    - uses: actions/[email protected]
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      with:
-        upload_url: ${{ steps.create_release.outputs.upload_url }}
-        asset_path: ./dist/rust-analyzer.vsix
-        asset_name: rust-analyzer.vsix
-        asset_content_type: application/octet-stream
-
-    - run: npm ci
-      working-directory: ./editors/code
-
-    - name: Publish Extension
-      working-directory: ./editors/code
-      # token from https://dev.azure.com/rust-analyzer/
-      run: npx vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }}
+    - name: Checkout repository
+      uses: actions/checkout@v1
+
+    - uses: actions/download-artifact@v1
+      with:
+        name: dist-macos-latest
+        path: dist
+    - uses: actions/download-artifact@v1
+      with:
+        name: dist-ubuntu-latest
+        path: dist
+    - uses: actions/download-artifact@v1
+      with:
+        name: dist-windows-latest
+        path: dist
+    - run: ls -all ./dist
+
+    - name: Publish Release
+      uses: ./.github/actions/github-release
+      with:
+        files: "dist/*"
+        name: ${{ env.TAG }}
+        token: ${{ secrets.GITHUB_TOKEN }}
+
+    - run: npm ci
+      working-directory: ./editors/code
+
+    - name: Publish Extension
+      if: github.event_name == 'push'
+      working-directory: ./editors/code
+      # token from https://dev.azure.com/rust-analyzer/
+      run: npx vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer.vsix
diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml
index caa1dcc30..e75e92695 100644
--- a/.github/workflows/rustdoc.yaml
+++ b/.github/workflows/rustdoc.yaml
@@ -2,7 +2,7 @@ name: rustdoc
 on:
   push:
     branches:
     - master
 
 jobs:
   rustdoc:
@@ -12,28 +12,24 @@ jobs:
       CARGO_INCREMENTAL: 0
 
     steps:
     - name: Checkout repository
       uses: actions/checkout@v1
 
     - name: Install Rust toolchain
       uses: actions-rs/toolchain@v1
       with:
         toolchain: stable
         profile: minimal
         override: true
         components: rustfmt, rust-src
 
     - name: Build Documentation
-      uses: actions-rs/cargo@v1
-      with:
-        command: doc
-        args: --all --no-deps
+      run: cargo doc --all --no-deps
 
     - name: Deploy Docs
-      uses: peaceiris/[email protected]
-      env:
-        ACTIONS_DEPLOY_KEY: ${{ secrets.ACTIONS_DEPLOY_KEY }}
-        PUBLISH_BRANCH: gh-pages
-        PUBLISH_DIR: ./target/doc
-      with:
-        forceOrphan: true
+      uses: peaceiris/actions-gh-pages@364c31d33bb99327c77b3a5438a83a357a6729ad # v3.4.0
+      with:
+        github_token: ${{ secrets.GITHUB_TOKEN }}
+        publish_branch: gh-pages
+        publish_dir: ./target/doc
+        force_orphan: true
diff --git a/.gitignore b/.gitignore
index dc5ceca7f..f835edef0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 /target/
+/dist/
 crates/*/target
 **/*.rs.bk
 **/*.rs.pending-snap
diff --git a/Cargo.lock b/Cargo.lock
index 316dae053..91edb460c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -46,9 +46,9 @@ checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
 
 [[package]]
 name = "backtrace"
-version = "0.3.44"
+version = "0.3.45"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4036b9bf40f3cf16aba72a3d65e8a520fc4bafcdc7079aea8f848c58c5b5536"
+checksum = "ad235dabf00f36301792cfe82499880ba54c6486be094d1047b02bacb67c14e8"
 dependencies = [
  "backtrace-sys",
  "cfg-if",
@@ -58,9 +58,9 @@ dependencies = [
 
 [[package]]
 name = "backtrace-sys"
-version = "0.1.32"
+version = "0.1.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491"
+checksum = "e17b52e737c40a7d75abca20b29a19a0eb7ba9fc72c5a72dd282a0a3c2c0dc35"
 dependencies = [
  "cc",
  "libc",
@@ -1259,9 +1259,9 @@ dependencies = [
 
 [[package]]
 name = "regex-syntax"
-version = "0.6.15"
+version = "0.6.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7246cd0a0a6ec2239a5405b2b16e3f404fa0dcc6d28f5f5b877bf80e33e0f294"
+checksum = "1132f845907680735a84409c3bebc64d1364a5683ffbce899550cd09d5eaefc1"
 
 [[package]]
 name = "relative-path"
diff --git a/crates/ra_assists/src/handlers/inline_local_variable.rs b/crates/ra_assists/src/handlers/inline_local_variable.rs
index eb5112343..3bfcba8ff 100644
--- a/crates/ra_assists/src/handlers/inline_local_variable.rs
+++ b/crates/ra_assists/src/handlers/inline_local_variable.rs
@@ -1,3 +1,4 @@
+use ra_ide_db::defs::Definition;
 use ra_syntax::{
     ast::{self, AstNode, AstToken},
     TextRange,
@@ -37,6 +38,15 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
         return None;
     }
     let initializer_expr = let_stmt.initializer()?;
+
+    let def = ctx.sema.to_def(&bind_pat)?;
+    let def = Definition::Local(def);
+    let refs = def.find_usages(ctx.db, None);
+    if refs.is_empty() {
+        tested_by!(test_not_applicable_if_variable_unused);
+        return None;
+    };
+
     let delete_range = if let Some(whitespace) = let_stmt
         .syntax()
         .next_sibling_or_token()
@@ -49,16 +59,14 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
     } else {
         let_stmt.syntax().text_range()
     };
-    let refs = ctx.sema.find_all_refs(&bind_pat);
-    if refs.is_empty() {
-        return None;
-    };
 
     let mut wrap_in_parens = vec![true; refs.len()];
 
     for (i, desc) in refs.iter().enumerate() {
-        let usage_node =
-            ctx.covering_node_for_range(desc.range).ancestors().find_map(ast::PathExpr::cast)?;
+        let usage_node = ctx
+            .covering_node_for_range(desc.file_range.range)
+            .ancestors()
+            .find_map(ast::PathExpr::cast)?;
         let usage_parent_option = usage_node.syntax().parent().and_then(ast::Expr::cast);
         let usage_parent = match usage_parent_option {
             Some(u) => u,
@@ -103,11 +111,9 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
         move |edit: &mut ActionBuilder| {
             edit.delete(delete_range);
             for (desc, should_wrap) in refs.iter().zip(wrap_in_parens) {
-                if should_wrap {
-                    edit.replace(desc.range, init_in_paren.clone())
-                } else {
-                    edit.replace(desc.range, init_str.clone())
-                }
+                let replacement =
+                    if should_wrap { init_in_paren.clone() } else { init_str.clone() };
+                edit.replace(desc.file_range.range, replacement)
             }
             edit.set_cursor(delete_range.start())
         },
@@ -657,6 +663,7 @@ fn foo() {
 
     #[test]
     fn test_not_applicable_if_variable_unused() {
+        covers!(test_not_applicable_if_variable_unused);
        check_assist_not_applicable(
            inline_local_variable,
            r"
diff --git a/crates/ra_assists/src/marks.rs b/crates/ra_assists/src/marks.rs
index cef3df4e5..22404ee80 100644
--- a/crates/ra_assists/src/marks.rs
+++ b/crates/ra_assists/src/marks.rs
@@ -1,9 +1,10 @@
 //! See test_utils/src/marks.rs
 
-test_utils::marks!(
+test_utils::marks![
     introduce_var_in_comment_is_not_applicable
     test_introduce_var_expr_stmt
     test_introduce_var_last_expr
     not_applicable_outside_of_bind_pat
     test_not_inline_mut_variable
-);
+    test_not_applicable_if_variable_unused
+];
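
The new mark is wired through three places: `tested_by!(test_not_applicable_if_variable_unused)` in the assist handler, `covers!(...)` in the test, and the `marks![...]` registration above. The real macros live in `test_utils/src/marks.rs`; the following is only a minimal self-contained sketch of the idea, not the actual implementation:

```rust
// Sketch of the mark pattern: production code bumps a counter when it takes the
// interesting branch, and the test asserts that the counter moved, so the test
// provably exercised that branch.
use std::sync::atomic::{AtomicUsize, Ordering};

static NOT_APPLICABLE_IF_VARIABLE_UNUSED: AtomicUsize = AtomicUsize::new(0);

// Analogue of `tested_by!(...)`, called from the handler.
fn hit_mark() {
    NOT_APPLICABLE_IF_VARIABLE_UNUSED.fetch_add(1, Ordering::Relaxed);
}

// Analogue of `covers!(...)`, wrapped around the scenario by the test.
fn covers<T>(f: impl FnOnce() -> T) -> T {
    let before = NOT_APPLICABLE_IF_VARIABLE_UNUSED.load(Ordering::Relaxed);
    let res = f();
    assert!(
        NOT_APPLICABLE_IF_VARIABLE_UNUSED.load(Ordering::Relaxed) > before,
        "expected the marked branch to be hit"
    );
    res
}

#[test]
fn unused_variable_bails_out() {
    // In the real test the closure runs the assist on code with an unused binding.
    covers(hit_mark);
}
```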
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index e1cb12cca..9f59d590c 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -45,8 +45,7 @@ pub use crate::{
         StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
     },
     has_source::HasSource,
-    semantics::{original_range, Semantics, SemanticsScope},
-    source_analyzer::PathResolution,
+    semantics::{original_range, PathResolution, Semantics, SemanticsScope},
 };
 
 pub use hir_def::{
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index afc7f7ee7..965d185a4 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -19,11 +19,24 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use crate::{
     db::HirDatabase,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
-    source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer},
-    Function, HirFileId, InFile, Local, MacroDef, Module, ModuleDef, Name, Origin, Path,
-    PathResolution, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
+    source_analyzer::{resolve_hir_path, SourceAnalyzer},
+    AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name,
+    Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
 };
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+    /// An item
+    Def(ModuleDef),
+    /// A local binding (only value namespace)
+    Local(Local),
+    /// A generic parameter
+    TypeParam(TypeParam),
+    SelfType(ImplDef),
+    Macro(MacroDef),
+    AssocItem(AssocItem),
+}
+
 /// Primary API to get semantic information, like types, from syntax trees.
 pub struct Semantics<'db, DB> {
     pub db: &'db DB,
@@ -171,12 +184,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         SemanticsScope { db: self.db, resolver }
     }
 
-    // FIXME: we only use this in `inline_local_variable` assist, ideally, we
-    // should switch to general reference search infra there.
-    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
-        self.analyze(pat.syntax()).find_all_refs(pat)
-    }
-
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
         let src = self.find_file(node.clone());
         self.analyze2(src.as_ref(), None)
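
`PathResolution` now lives in `ra_hir::semantics` and is re-exported from the crate root (see the `lib.rs` hunk above), so callers outside `ra_hir` keep matching on the same variants. A hypothetical consumer, shown only to illustrate the relocated enum (not part of this commit):

```rust
use ra_hir::PathResolution;

// Illustrative helper: label a resolved path for display. The variants are
// exactly the ones declared in semantics.rs above.
fn kind_label(res: &PathResolution) -> &'static str {
    match res {
        PathResolution::Def(_) => "definition",
        PathResolution::Local(_) => "local binding",
        PathResolution::TypeParam(_) => "type parameter",
        PathResolution::SelfType(_) => "Self type",
        PathResolution::Macro(_) => "macro",
        PathResolution::AssocItem(_) => "associated item",
    }
}
```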
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 015389fb0..f3f1ed05a 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -7,7 +7,6 @@
 //! purely for "IDE needs".
 use std::{iter::once, sync::Arc};
 
-use either::Either;
 use hir_def::{
     body::{
         scope::{ExprScopes, ScopeId},
@@ -21,12 +20,12 @@ use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
 use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
 use ra_syntax::{
     ast::{self, AstNode},
-    AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
+    SyntaxNode, SyntaxNodePtr, TextUnit,
 };
 
 use crate::{
-    db::HirDatabase, Adt, Const, EnumVariant, Function, Local, MacroDef, ModPath, ModuleDef, Path,
-    PathKind, Static, Struct, Trait, Type, TypeAlias, TypeParam,
+    db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef,
+    ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, TypeParam,
 };
 
 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
@@ -41,25 +40,6 @@ pub(crate) struct SourceAnalyzer {
     scopes: Option<Arc<ExprScopes>>,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum PathResolution {
-    /// An item
-    Def(crate::ModuleDef),
-    /// A local binding (only value namespace)
-    Local(Local),
-    /// A generic parameter
-    TypeParam(TypeParam),
-    SelfType(crate::ImplDef),
-    Macro(MacroDef),
-    AssocItem(crate::AssocItem),
-}
-
-#[derive(Debug)]
-pub struct ReferenceDescriptor {
-    pub range: TextRange,
-    pub name: String,
-}
-
 impl SourceAnalyzer {
     pub(crate) fn new_for_body(
         db: &impl HirDatabase,
@@ -111,20 +91,16 @@ impl SourceAnalyzer {
     fn expand_expr(
         &self,
         db: &impl HirDatabase,
-        expr: InFile<&ast::Expr>,
+        expr: InFile<ast::MacroCall>,
     ) -> Option<InFile<ast::Expr>> {
-        let macro_call = ast::MacroCall::cast(expr.value.syntax().clone())?;
-        let macro_file =
-            self.body_source_map.as_ref()?.node_macro_file(expr.with_value(&macro_call))?;
+        let macro_file = self.body_source_map.as_ref()?.node_macro_file(expr.as_ref())?;
         let expanded = db.parse_or_expand(macro_file)?;
-        let kind = expanded.kind();
-        let expr = InFile::new(macro_file, ast::Expr::cast(expanded)?);
 
-        if ast::MacroCall::can_cast(kind) {
-            self.expand_expr(db, expr.as_ref())
-        } else {
-            Some(expr)
-        }
+        let res = match ast::MacroCall::cast(expanded.clone()) {
+            Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?,
+            _ => InFile::new(macro_file, ast::Expr::cast(expanded)?),
+        };
+        Some(res)
     }
 
     fn trait_env(&self, db: &impl HirDatabase) -> Arc<TraitEnvironment> {
@@ -132,11 +108,13 @@ impl SourceAnalyzer {
     }
 
     pub(crate) fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
-        let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) {
-            self.body_source_map.as_ref()?.node_expr(expr.as_ref())?
-        } else {
-            self.expr_id(expr)?
-        };
+        let expr_id = match expr {
+            ast::Expr::MacroCall(call) => {
+                let expr = self.expand_expr(db, InFile::new(self.file_id, call.clone()))?;
+                self.body_source_map.as_ref()?.node_expr(expr.as_ref())
+            }
+            _ => self.expr_id(expr),
+        }?;
 
         let ty = self.infer.as_ref()?[expr_id].clone();
         let environment = self.trait_env(db);
@@ -251,38 +229,6 @@ impl SourceAnalyzer {
         resolve_hir_path(db, &self.resolver, &hir_path)
     }
 
-    fn resolve_local_name(
-        &self,
-        name_ref: &ast::NameRef,
-    ) -> Option<Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>> {
-        let name = name_ref.as_name();
-        let source_map = self.body_source_map.as_ref()?;
-        let scopes = self.scopes.as_ref()?;
-        let scope = scope_for(scopes, source_map, InFile::new(self.file_id, name_ref.syntax()))?;
-        let entry = scopes.resolve_name_in_scope(scope, &name)?;
-        Some(source_map.pat_syntax(entry.pat())?.value)
-    }
-
-    // FIXME: we only use this in `inline_local_variable` assist, ideally, we
-    // should switch to general reference search infra there.
-    pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
-        let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
-        let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone())));
-        fn_def
-            .syntax()
-            .descendants()
-            .filter_map(ast::NameRef::cast)
-            .filter(|name_ref| match self.resolve_local_name(&name_ref) {
-                None => false,
-                Some(d_ptr) => d_ptr == ptr,
-            })
-            .map(|name_ref| ReferenceDescriptor {
-                name: name_ref.text().to_string(),
-                range: name_ref.syntax().text_range(),
-            })
-            .collect()
-    }
-
     pub(crate) fn expand(
         &self,
         db: &impl HirDatabase,
diff --git a/docs/user/features.md b/docs/user/features.md
index 48c63ba7b..ba4d50fa8 100644
--- a/docs/user/features.md
+++ b/docs/user/features.md
@@ -105,9 +105,9 @@ Start `cargo watch` for live error highlighting. Will prompt to install if it's
 
 Stop `cargo watch`.
 
 #### Structural Seach and Replace
 
 Search and replace with named wildcards that will match any expression.
 The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. A `$<name>:expr` placeholder in the search pattern will match any expression and `$<name>` will reference it in the replacement. Available via the command `rust-analyzer.ssr`.
 
 ```rust
@@ -195,4 +195,5 @@ In VS Code, the following settings can be used to configure the inlay hints:
 * `rust-analyzer.maxInlayHintLength` — shortens the hints if their length exceeds the value specified. If no value is specified (`null`), no shortening is applied.
 
 **Note:** VS Code does not have native support for inlay hints [yet](https://github.com/microsoft/vscode/issues/16221) and the hints are implemented using decorations.
-This approach has limitations: the caret movement near the end of the hint may look [weird](https://github.com/rust-analyzer/rust-analyzer/issues/1623).
+This approach has limitations, the caret movement and bracket highlighting near the edges of the hint may be weird:
+[1](https://github.com/rust-analyzer/rust-analyzer/issues/1623), [2](https://github.com/rust-analyzer/rust-analyzer/issues/3453).
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
new file mode 100644
index 000000000..737751ae8
--- /dev/null
+++ b/xtask/src/dist.rs
@@ -0,0 +1,79 @@
+use std::path::PathBuf;
+
+use anyhow::Result;
+
+use crate::{
+    not_bash::{fs2, pushd, pwd, rm_rf, run},
+    project_root,
+};
+
+pub fn run_dist(nightly: bool) -> Result<()> {
+    let dist = project_root().join("dist");
+    rm_rf(&dist)?;
+    fs2::create_dir_all(&dist)?;
+
+    if cfg!(target_os = "linux") {
+        dist_client(nightly)?;
+    }
+    dist_server()?;
+    Ok(())
+}
+
+fn dist_client(nightly: bool) -> Result<()> {
+    let _d = pushd("./editors/code");
+
+    let package_json_path = pwd().join("package.json");
+    let original_package_json = fs2::read_to_string(&package_json_path)?;
+    let _restore =
+        Restore { path: package_json_path.clone(), contents: original_package_json.clone() };
+
+    let mut package_json = original_package_json.replace(r#""enableProposedApi": true,"#, r#""#);
+
+    if nightly {
+        package_json = package_json.replace(
+            r#""displayName": "rust-analyzer""#,
+            r#""displayName": "rust-analyzer nightly""#,
+        );
+    } else {
+        package_json = original_package_json.replace(r#""enableProposedApi": true,"#, r#""#);
+    }
+    fs2::write(package_json_path, package_json)?;
+
+    run!("npx vsce package -o ../../dist/rust-analyzer.vsix")?;
+    Ok(())
+}
+
+fn dist_server() -> Result<()> {
+    if cfg!(target_os = "linux") {
+        std::env::set_var("CC", "clang");
+        run!("cargo build --package rust-analyzer --bin rust-analyzer --release --target x86_64-unknown-linux-musl")?;
+        run!("strip ./target/x86_64-unknown-linux-musl/release/rust-analyzer")?;
+    } else {
+        run!("cargo build --package rust-analyzer --bin rust-analyzer --release")?;
+    }
+
+    let (src, dst) = if cfg!(target_os = "linux") {
+        ("./target/x86_64-unknown-linux-musl/release/rust-analyzer", "./dist/rust-analyzer-linux")
+    } else if cfg!(target_os = "windows") {
+        ("/target/release/rust-analyzer.exe", "./dist/rust-analyzer-windows.exe")
+    } else if cfg!(target_os = "macos") {
+        ("/target/release/rust-analyzer", "./dist/rust-analyzer-mac")
+    } else {
+        panic!("Unsupported OS")
+    };
+
+    fs2::copy(src, dst)?;
+
+    Ok(())
+}
+
+struct Restore {
+    path: PathBuf,
+    contents: String,
+}
+
+impl Drop for Restore {
+    fn drop(&mut self) {
+        fs2::write(&self.path, &self.contents).unwrap();
+    }
+}
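
`dist_client` edits `package.json` in place and leans on `Restore`'s `Drop` impl to write the original contents back on every exit path, including the early return when `run!` fails through `?`. A tiny self-contained illustration of that guard pattern (the names here are made up for the example, not from the commit):

```rust
struct RestoreGuard(&'static str);

impl Drop for RestoreGuard {
    fn drop(&mut self) {
        // Runs on normal return, on `?` early return, and during unwinding.
        println!("restored {}", self.0);
    }
}

fn patch_and_package() -> Result<(), ()> {
    let _restore = RestoreGuard("package.json");
    Err(())?; // stand-in for `run!("npx vsce package ...")?` failing
    Ok(())
}

fn main() {
    let _ = patch_and_package(); // still prints "restored package.json"
}
```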
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index f76467cac..d0d745b05 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -4,10 +4,7 @@ use std::{env, path::PathBuf, str};
 
 use anyhow::{bail, format_err, Context, Result};
 
-use crate::{
-    not_bash::{pushd, run},
-    project_root,
-};
+use crate::not_bash::{pushd, run};
 
 // Latest stable, feel free to send a PR if this lags behind.
 const REQUIRED_RUST_VERSION: u32 = 41;
@@ -27,7 +24,6 @@ pub struct ServerOpt {
 
 impl InstallCmd {
     pub fn run(self) -> Result<()> {
-        let _dir = pushd(project_root());
         let both = self.server.is_some() && self.client.is_some();
         if cfg!(target_os = "macos") {
             fix_path_for_mac().context("Fix path for mac")?
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index f48045d17..014b61b37 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -4,6 +4,7 @@
 
 pub mod not_bash;
 pub mod install;
+pub mod dist;
 pub mod pre_commit;
 
 pub mod codegen;
@@ -90,7 +91,6 @@ pub fn run_clippy() -> Result<()> {
 
     let allowed_lints = [
         "clippy::collapsible_if",
-        "clippy::map_clone", // FIXME: remove when Iterator::copied stabilizes (1.36.0)
         "clippy::needless_pass_by_value",
         "clippy::nonminimal_bool",
         "clippy::redundant_pattern_matching",
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index a7dffe2cc..7c8ea9001 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -13,8 +13,11 @@ use std::env;
 use pico_args::Arguments;
 use xtask::{
     codegen::{self, Mode},
+    dist::run_dist,
     install::{ClientOpt, InstallCmd, ServerOpt},
-    pre_commit, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt, Result,
+    not_bash::pushd,
+    pre_commit, project_root, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt,
+    Result,
 };
 
 fn main() -> Result<()> {
@@ -22,6 +25,8 @@ fn main() -> Result<()> {
         return pre_commit::run_hook();
     }
 
+    let _d = pushd(project_root());
+
     let mut args = Arguments::from_env();
     let subcommand = args.subcommand()?.unwrap_or_default();
 
@@ -97,6 +102,11 @@ FLAGS:
             args.finish()?;
             run_release(dry_run)
         }
+        "dist" => {
+            let nightly = args.contains("--nightly");
+            args.finish()?;
+            run_dist(nightly)
+        }
         _ => {
             eprintln!(
                 "\
@@ -112,7 +122,8 @@ SUBCOMMANDS:
     fuzz-tests
     codegen
     install
-    lint"
+    lint
+    dist"
             );
             Ok(())
         }
diff --git a/xtask/src/not_bash.rs b/xtask/src/not_bash.rs
index 40f706d9f..1697b7fcd 100644
--- a/xtask/src/not_bash.rs
+++ b/xtask/src/not_bash.rs
@@ -19,6 +19,11 @@ pub mod fs2 {
         fs::read_dir(path).with_context(|| format!("Failed to read {}", path.display()))
     }
 
+    pub fn read_to_string<P: AsRef<Path>>(path: P) -> Result<String> {
+        let path = path.as_ref();
+        fs::read_to_string(path).with_context(|| format!("Failed to read {}", path.display()))
+    }
+
     pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
         let path = path.as_ref();
         fs::write(path, contents).with_context(|| format!("Failed to write {}", path.display()))
@@ -40,6 +45,11 @@ pub mod fs2 {
         let path = path.as_ref();
         fs::remove_dir_all(path).with_context(|| format!("Failed to remove dir {}", path.display()))
     }
+
+    pub fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
+        let path = path.as_ref();
+        fs::create_dir_all(path).with_context(|| format!("Failed to create dir {}", path.display()))
+    }
 }
 
 macro_rules! _run {
@@ -61,6 +71,10 @@ pub fn pushd(path: impl Into<PathBuf>) -> Pushd {
     Pushd { _p: () }
 }
 
+pub fn pwd() -> PathBuf {
+    Env::with(|env| env.cwd())
+}
+
 impl Drop for Pushd {
     fn drop(&mut self) {
         Env::with(|env| env.popd())
@@ -85,7 +99,6 @@ pub fn run_process(cmd: String, echo: bool) -> Result<String> {
 }
 
 fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
-    let cwd = Env::with(|env| env.cwd());
     let mut args = shelx(cmd);
     let binary = args.remove(0);
 
@@ -95,7 +108,7 @@ fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
 
     let output = Command::new(binary)
         .args(args)
-        .current_dir(cwd)
+        .current_dir(pwd())
         .stdin(Stdio::null())
         .stderr(Stdio::inherit())
         .output()?;
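
The `not_bash` additions above (`pwd`, `fs2::read_to_string`, `fs2::create_dir_all`) round out the helpers that `dist.rs` builds on: `pushd` adjusts a process-wide virtual working directory, `pwd` reads it, and `run!` executes commands relative to it. A rough usage sketch from inside the xtask crate, with hypothetical paths and an illustrative function name:

```rust
use anyhow::Result;

use crate::not_bash::{fs2, pushd, pwd, run};

// Illustrative only: stage a file into ./dist the "not_bash" way.
fn stage_readme() -> Result<()> {
    let _d = pushd("./editors/code"); // popped again when `_d` drops
    let readme = fs2::read_to_string(pwd().join("README.md"))?; // error message names the path
    fs2::create_dir_all("../../dist")?;
    fs2::write("../../dist/README.md", readme)?;
    run!("ls ../../dist")?; // commands run with current_dir(pwd())
    Ok(())
}
```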