Compare commits

133 commits: 2.1472-vsc ... 2.1698
Commit SHA1s:

f51e045cd5, 8122b7f69e, 25f18beda4, 7e7923706f, ae35673489, 23f142fdc6, 101139fabf, e2d354c8f2,
7c178805ea, 45f70e741f, 1474a82c7d, d97feca3ba, b2669e78bf, 66ee6e8201, 62f050fda7, 57425377e5,
174cb2f8a9, 42bddce21f, f2a15795a1, 6dd5e515c5, 92da02ef3e, 3ce7129492, 336ee28888, 3f2240ab65,
1087037728, 1959d82912, 8024144381, 6a1dcab7a6, e6d1f2a7c8, 44c4722edf, e5fc63f2c8, 015a99e87d,
884491d72b, e14362f322, 917aa48072, 938c6ef829, 0add01d383, 2018024810, a1d6bcb8e5, 727ac6483b,
2c15c09fc0, 2ad2582cc0, cee0ac213c, 780a673017, af71203955, fc3acfabb2, 3d5db8313a, 73cf8f34e3,
766efd6079, 87485948ad, 7e4a73ce2d, 2f0878d9b7, f65c9b23fc, cd859d117f, e22964915a, 197d0b6ca9,
422503ef98, ea36345d2c, a89d83cbba, 83ff31b620, 3a9b032c72, f73e9225b4, 168ccb0dfc, 58f7f5b769,
b8e6369fbe, d81d5f499f, 4be178d234, 9c40466b4b, 95693fb58e, e7945bea94, 91f49e1efd, eea9c1618c,
f1b38e4e48, ff99a1d768, 7f07b8f66c, faae03da6b, a6e4f96737, cc7585bbc2, 14a0cd3ffd, 3ff83eda45,
f133b00851, ece840834d, 76f6ff4145, 2458cde498, 82e2b8a169, 5aa2abaf9f, fdb2308c62, 4cd2f2cd52,
88cef85f62, bdd11f741b, 56ce780522, 567010e163, 4ae2c81157, ae43e2016f, 3f6cbfa4dd, 1c50b5285e,
ea9c511db8, e1e3f32643, 4290cffe3b, 548d095611, 846dcbb947, d7d3368cc2, 134040fea3, 65caa26d40,
7812f6b75a, 637e58f255, 6135630fc0, 22058c5f86, 616bdb35f3, 42f7b5d12b, 53818b0e36, 6f08b13540,
d36526b1c8, 1252eb6a8a, 4733c31a2f, 17c5173d8b, d0a08f6dd7, 7b5d6d186b, 3851927396, 7eececead6,
b8c3d96fcd, a2ee6c8e73, ef069d9b0e, 6a864f9f47, 5c16399810, 0141ded35d, bb46e80d44, 1bd5eca73d,
48a97abe1d, 0ff8a11c7f, 8b1cdaa4a1, 0bbaa9763b, dbe5f23e21
@@ -9,3 +9,4 @@ doc
LICENSE
README.md
node_modules
release
.drone.yml (new file, 368 lines)
@@ -0,0 +1,368 @@
kind: pipeline
type: docker
name: amd64:linux

platform:
  arch: amd64

steps:
- name: cache:restore
  image: node:12
  commands:
  - ./scripts/cacher.sh

- name: build
  image: codercom/nbin:centos
  commands:
  - yum install -y libxkbfile-devel libsecret-devel
  - . /opt/rh/devtoolset-6/enable
  - timeout 50m ./scripts/ci.bash || echo 'Timed out or failed; continuing so we can preserve cache for the next run'

- name: cache:package
  image: node:12
  commands:
  - ./scripts/cacher.sh
  when:
    event: push

- name: cache:push
  image: plugins/gcs
  settings:
    source: cache-upload/
    target: codesrv-ci.cdr.sh
    token:
      from_secret: gcs-token
  when:
    event: push

- name: test
  image: node:12
  commands:
  - yarn test

- name: publish:github
  image: plugins/github-release
  settings:
    api_key:
      from_secret: github_token
    files: release/*.tar.gz
    draft: true
    overwrite: true
    title: ${DRONE_TAG}
  when:
    event: tag

- name: publish:docker
  image: plugins/docker
  settings:
    username:
      from_secret: docker_user
    password:
      from_secret: docker_pass
    repo: codercom/code-server
    dockerfile: scripts/ci.dockerfile
    tags:
    - ${DRONE_TAG}
  when:
    event: tag

---
kind: pipeline
type: docker
name: amd64:alpine

platform:
  arch: amd64

steps:
- name: cache:restore
  image: node:12-alpine
  commands:
  - ./scripts/cacher.sh

- name: build
  image: node:12-alpine
  commands:
  - apk add libxkbfile-dev libsecret-dev build-base git bash python
  - timeout 50m ./scripts/ci.bash || echo 'Timed out or failed; continuing so we can preserve cache for the next run'

- name: cache:package
  image: node:12-alpine
  commands:
  - ./scripts/cacher.sh
  when:
    event: push

- name: cache:push
  image: plugins/gcs
  settings:
    source: cache-upload/
    target: codesrv-ci.cdr.sh
    token:
      from_secret: gcs-token
  when:
    event: push

- name: test
  image: node:12-alpine
  commands:
  - yarn test

- name: publish:github
  image: plugins/github-release
  settings:
    api_key:
      from_secret: github_token
    files: release/*.tar.gz
    draft: true
    overwrite: true
    title: ${DRONE_TAG}
  when:
    event: tag

---
kind: pipeline
type: docker
name: arm64:linux

platform:
  arch: arm64

steps:
- name: cache:restore
  image: node:12
  commands:
  - ./scripts/cacher.sh

- name: build
  image: node:12
  commands:
  - apt update && apt install -y build-essential git libsecret-1-dev libx11-dev libxkbfile-dev
  - timeout 50m ./scripts/ci.bash || echo 'Timed out or failed; continuing so we can preserve cache for the next run'

- name: cache:package
  image: node:12
  commands:
  - ./scripts/cacher.sh
  when:
    event: push

- name: cache:push
  image: plugins/gcs
  settings:
    source: cache-upload/
    target: codesrv-ci.cdr.sh
    token:
      from_secret: gcs-token
  when:
    event: push

- name: test
  image: node:12
  commands:
  - yarn test

- name: publish:github
  image: plugins/github-release
  settings:
    api_key:
      from_secret: github_token
    files: release/*.tar.gz
    draft: true
    overwrite: true
    title: ${DRONE_TAG}
  when:
    event: tag

- name: publish:docker
  image: plugins/docker
  settings:
    username:
      from_secret: docker_user
    password:
      from_secret: docker_pass
    repo: codercom/code-server
    dockerfile: scripts/ci.dockerfile
    tags:
    - ${DRONE_TAG}-arm64
  when:
    event: tag

---
kind: pipeline
type: docker
name: arm64:alpine

platform:
  arch: arm64

steps:
- name: cache:restore
  image: node:12-alpine
  commands:
  - ./scripts/cacher.sh

- name: build
  image: node:12-alpine
  commands:
  - apk add libxkbfile-dev libsecret-dev build-base git bash python
  - timeout 50m ./scripts/ci.bash || echo 'Timed out or failed; continuing so we can preserve cache for the next run'

- name: cache:package
  image: node:12-alpine
  commands:
  - ./scripts/cacher.sh
  when:
    event: push

- name: cache:push
  image: plugins/gcs
  settings:
    source: cache-upload/
    target: codesrv-ci.cdr.sh
    token:
      from_secret: gcs-token
  when:
    event: push

- name: test
  image: node:12-alpine
  commands:
  - yarn test

- name: publish:github
  image: plugins/github-release
  settings:
    api_key:
      from_secret: github_token
    files: release/*.tar.gz
    draft: true
    overwrite: true
    title: ${DRONE_TAG}
  when:
    event: tag

---
kind: pipeline
type: docker
name: arm:linux

platform:
  arch: arm

steps:
- name: cache:restore
  image: node:12
  commands:
  - ./scripts/cacher.sh

- name: build
  image: node:12
  commands:
  - apt update && apt install -y build-essential git libsecret-1-dev libx11-dev libxkbfile-dev
  - timeout 50m ./scripts/ci.bash || echo 'Timed out or failed; continuing so we can preserve cache for the next run'

- name: cache:package
  image: node:12
  commands:
  - ./scripts/cacher.sh
  when:
    event: push

- name: cache:push
  image: plugins/gcs
  settings:
    source: cache-upload/
    target: codesrv-ci.cdr.sh
    token:
      from_secret: gcs-token
  when:
    event: push

- name: test
  image: node:12
  failure: ignore
  commands:
  - yarn test

# - name: publish:github
#   image: plugins/github-release
#   settings:
#     api_key:
#       from_secret: github_token
#     files: release/*.tar.gz
#     draft: true
#     overwrite: true
#     title: ${DRONE_TAG}
#   when:
#     event: tag

# - name: publish:docker
#   image: plugins/docker
#   settings:
#     username:
#       from_secret: docker_user
#     password:
#       from_secret: docker_pass
#     repo: codercom/code-server
#     dockerfile: scripts/ci.dockerfile
#     tags:
#     - ${DRONE_TAG}-arm
#   when:
#     event: tag

---
kind: pipeline
type: docker
name: arm:alpine

platform:
  arch: arm

steps:
- name: cache:restore
  image: node:12-alpine
  commands:
  - ./scripts/cacher.sh

- name: build
  image: node:12-alpine
  commands:
  - apk add libxkbfile-dev libsecret-dev build-base git bash python
  - timeout 50m ./scripts/ci.bash || echo 'Timed out or failed; continuing so we can preserve cache for the next run'

- name: cache:package
  image: node:12-alpine
  commands:
  - ./scripts/cacher.sh
  when:
    event: push

- name: cache:push
  image: plugins/gcs
  settings:
    source: cache-upload/
    target: codesrv-ci.cdr.sh
    token:
      from_secret: gcs-token
  when:
    event: push

- name: test
  image: node:12-alpine
  failure: ignore
  commands:
  - yarn test

# - name: publish:github
#   image: plugins/github-release
#   failure: ignore
#   settings:
#     api_key:
#       from_secret: github_token
#     files: release/*.tar.gz
#     draft: true
#     overwrite: true
#     title: ${DRONE_TAG}
#   when:
#     event: tag

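Each build step above wraps ci.bash in `timeout` and swallows a non-zero exit so the later cache:package and cache:push steps still run. A minimal sketch of that idiom, with a hypothetical `long_build.sh` standing in for `./scripts/ci.bash`:

```shell
# Sketch only: long_build.sh is a placeholder for the real build command.
# timeout kills it after 50 minutes; the || echo keeps the step's exit code at 0,
# so Drone continues on to the cache:package and cache:push steps and the cache survives.
timeout 50m ./long_build.sh || echo 'Timed out or failed; continuing so we can preserve cache for the next run'
```
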
.gitignore (vendored, 6 changed lines)
@@ -1,3 +1,5 @@
node_modules
build
release
/build
/release
/binaries
/lib
@@ -1 +1 @@
10.16.0
12.14.0
.travis.yml (70 changed lines)
@@ -1,49 +1,37 @@
language: node_js
node_js:
- 10.16.0
- 12.14.0
services:
- docker
matrix:
- docker

jobs:
  include:
  - os: linux
    dist: trusty
    env:
    - VSCODE_VERSION="1.38.1" MAJOR_VERSION="2" VERSION="$MAJOR_VERSION.$TRAVIS_BUILD_NUMBER" TARGET="linux"
  - os: linux
    dist: trusty
    env:
    - VSCODE_VERSION="1.38.1" MAJOR_VERSION="2" VERSION="$MAJOR_VERSION.$TRAVIS_BUILD_NUMBER" TARGET="alpine"
  - os: osx
    env:
    - VSCODE_VERSION="1.38.1" MAJOR_VERSION="2" VERSION="$MAJOR_VERSION.$TRAVIS_BUILD_NUMBER"
before_install:
- if [[ "$TRAVIS_BRANCH" == "master" ]]; then export MINIFY="true"; fi
- if [[ "$TRAVIS_BRANCH" == "master" ]]; then export PACKAGE="true"; fi
script:
- travis_wait 30 scripts/ci.bash
before_deploy:
- echo "$VERSION-vsc$VSCODE_VERSION" "$TRAVIS_COMMIT"
- git config --local user.name "$USER_NAME"
- git config --local user.email "$USER_EMAIL"
- git tag "$VERSION-vsc$VSCODE_VERSION" "$TRAVIS_COMMIT"
  - name: "MacOS build"
    os: osx
    script: travis_wait 60 scripts/ci.bash

git:
  depth: 3

deploy:
  provider: releases
  file_glob: true
  draft: true
  tag_name: "$VERSION-vsc$VSCODE_VERSION"
  target_commitish: "$TRAVIS_COMMIT"
  name: "$VERSION-vsc$VSCODE_VERSION"
  skip_cleanup: true
  api_key:
    secure: YL/x24KjYjgYXPcJWk3FV7FGxI79Mh6gBECQEcdlf3fkLEoKFVgzHBoUNWrFPzyR4tgLyWNAgcpD9Lkme1TRWTom7UPjXcwMNyLcLa+uec7ciSAnYD9ntLTpiCuPDD1u0LtRGclSi/EHQ+F8YVq+HZJpXTsJeAmOmihma3GVbGKSZr+BRum+0YZSG4w+o4TOlYzw/4bLWS52MogZcwpjd+hemBbgXLuGU2ziKv2vEKCZFbEeA16II4x1WLI4mutDdCeh7+3aLzGLwDa49NxtsVYNjyNFF75JhCTCNA55e2YMiLz9Uq69IXe/mi5F7xUaFfhIqqLNyKBnKeEOzu3dYnc+8n3LjnQ+00PmkF05nx9kBn3UfV1kwQGh6QbyDmTtBP07rtUMyI14aeQqHjxsaVRdMnwj9Q2DjXRr8UDqESZF0rmK3pHCXS2fBhIzLE8tLVW5Heiba2pQRFMHMZW+KBE97FzcFh7is90Ait3T8enfcd/PWFPYoBejDAdjwxwOkezh5N5ZkYquEfDYuWrFi6zRFCktsruaAcA+xGtTf9oilBBzUqu8Ie+YFWH5me83xakcblJWdaW/D2rLJAJH3m6LFm8lBqyUgDX5t/etob6CpDuYHu5D1J3XINOj/+aLAcadq6qlh70PMZS3zYffUu3JlzaD2amlSHIT8b5YXFc=
  file:
  - release/*.tar.gz
  - release/*.zip
  on:
    repo: cdr/code-server
    branch: master
- provider: releases
  file_glob: true
  draft: true
  tag_name: "$TRAVIS_TAG"
  target_commitish: "$TRAVIS_COMMIT"
  name: "$TRAVIS_TAG"
  skip_cleanup: true
  api_key:
    secure: YL/x24KjYjgYXPcJWk3FV7FGxI79Mh6gBECQEcdlf3fkLEoKFVgzHBoUNWrFPzyR4tgLyWNAgcpD9Lkme1TRWTom7UPjXcwMNyLcLa+uec7ciSAnYD9ntLTpiCuPDD1u0LtRGclSi/EHQ+F8YVq+HZJpXTsJeAmOmihma3GVbGKSZr+BRum+0YZSG4w+o4TOlYzw/4bLWS52MogZcwpjd+hemBbgXLuGU2ziKv2vEKCZFbEeA16II4x1WLI4mutDdCeh7+3aLzGLwDa49NxtsVYNjyNFF75JhCTCNA55e2YMiLz9Uq69IXe/mi5F7xUaFfhIqqLNyKBnKeEOzu3dYnc+8n3LjnQ+00PmkF05nx9kBn3UfV1kwQGh6QbyDmTtBP07rtUMyI14aeQqHjxsaVRdMnwj9Q2DjXRr8UDqESZF0rmK3pHCXS2fBhIzLE8tLVW5Heiba2pQRFMHMZW+KBE97FzcFh7is90Ait3T8enfcd/PWFPYoBejDAdjwxwOkezh5N5ZkYquEfDYuWrFi6zRFCktsruaAcA+xGtTf9oilBBzUqu8Ie+YFWH5me83xakcblJWdaW/D2rLJAJH3m6LFm8lBqyUgDX5t/etob6CpDuYHu5D1J3XINOj/+aLAcadq6qlh70PMZS3zYffUu3JlzaD2amlSHIT8b5YXFc=
  file:
  - release/*.tar.gz
  - release/*.zip
  on:
    repo: cdr/code-server
    tags: true

cache:
  yarn: true
  timeout: 1000
  yarn: true
  directories:
  - .cache
  - source

Dockerfile (30 changed lines)
@@ -1,24 +1,21 @@
FROM node:10.16.0
ARG codeServerVersion=docker
ARG vscodeVersion
FROM node:12.14.0
ARG tag
ARG githubToken

# Install VS Code's deps. These are the only two it seems we need.
RUN apt-get update && apt-get install -y \
    libxkbfile-dev \
    libsecret-1-dev

# Ensure latest yarn.
RUN npm install -g yarn@1.13

WORKDIR /src
COPY . .

RUN yarn \
    && MINIFY=true yarn build "${vscodeVersion}" "${codeServerVersion}" \
    && yarn binary "${vscodeVersion}" "${codeServerVersion}" \
    && mv "/src/build/code-server${codeServerVersion}-vsc${vscodeVersion}-linux-x86_64-built/code-server${codeServerVersion}-vsc${vscodeVersion}-linux-x86_64" /src/build/code-server
    && DRONE_TAG="$tag" MINIFY=true BINARY=true GITHUB_TOKEN="$githubToken" ./scripts/ci.bash \
    && rm -r /src/build \
    && rm -r /src/source

# We deploy with ubuntu so that devs have a familiar environment.
# We deploy with Ubuntu so that devs have a familiar environment.
FROM ubuntu:18.04

RUN apt-get update && apt-get install -y \
@@ -30,20 +27,23 @@ RUN apt-get update && apt-get install -y \
    dumb-init \
    vim \
    curl \
    wget
    wget \
    && rm -rf /var/lib/apt/lists/*

RUN locale-gen en_US.UTF-8
# We cannot use update-locale because docker will not use the env variables
# configured in /etc/default/locale so we need to set it manually.
ENV LC_ALL=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8 \
    SHELL=/bin/bash

RUN adduser --gecos '' --disabled-password coder && \
    echo "coder ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers.d/nopasswd

USER coder
# We create first instead of just using WORKDIR as when WORKDIR creates, the
# user is root.
# Create first so these directories will be owned by coder instead of root
# (workdir and mounting appear to both default to root).
RUN mkdir -p /home/coder/project
RUN mkdir -p /home/coder/.local/share/code-server

WORKDIR /home/coder/project

@@ -51,7 +51,7 @@ WORKDIR /home/coder/project
# mount. So that they do not lose their data if they delete the container.
VOLUME [ "/home/coder/project" ]

COPY --from=0 /src/build/code-server /usr/local/bin/code-server
COPY --from=0 /src/binaries/code-server /usr/local/bin/code-server
EXPOSE 8080

ENTRYPOINT ["dumb-init", "code-server", "--host", "0.0.0.0"]

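For illustration only, the new `ARG tag` / `ARG githubToken` arguments in the first stage might be supplied like this when building the image locally; the tag value, image name, and token are placeholders, not values from this change:

```shell
# Hypothetical local build; the build args feed DRONE_TAG and GITHUB_TOKEN inside ci.bash.
docker build \
  --build-arg tag=2.1698 \
  --build-arg githubToken="$GITHUB_TOKEN" \
  -t code-server:local .
```
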
README.md (128 changed lines)
@@ -4,8 +4,9 @@
remote server, accessible through the browser.

Try it out:

```bash
docker run -it -p 127.0.0.1:8080:8080 -v "${HOME}/.local/share/code-server:/home/coder/.local/share/code-server" -v "$PWD:/home/coder/project" codercom/code-server
docker run -it -p 127.0.0.1:8080:8080 -v "${HOME}/.local/share/code-server:/home/coder/.local/share/code-server" -v "$PWD:/home/coder/project" codercom/code-server:v2
```

- **Consistent environment:** Code on your Chromebook, tablet, and laptop with a
@@ -18,10 +19,22 @@ docker run -it -p 127.0.0.1:8080:8080 -v "${HOME}/.local/share/code-server:/home

## Getting Started

### Requirements

- 64-bit host.
- At least 1GB of RAM.
- 2 cores or more are recommended (1 core works but not optimally).
- Secure connection over HTTPS or localhost (required for service workers).
- For Linux: GLIBC 2.17 or later and GLIBCXX 3.4.15 or later.
- Docker (for Docker versions of `code-server`).

### Run over SSH

Use [sshcode](https://github.com/codercom/sshcode) for a simple setup.

### Docker

See the Docker one-liner mentioned above. Dockerfile is at [/Dockerfile](/Dockerfile).

To debug Golang using the
@@ -30,9 +43,14 @@ you need to add `--security-opt seccomp=unconfined` to your `docker run`
arguments when launching code-server with Docker. See
[#725](https://github.com/cdr/code-server/issues/725) for details.

### Digital Ocean

[](https://marketplace.digitalocean.com/apps/code-server?action=deploy)

### Binaries

1. [Download a binary](https://github.com/cdr/code-server/releases). (Linux and
   OS X supported. Windows coming soon)
   OS X supported. Windows coming soon)
2. Unpack the downloaded file then run the binary.
3. In your browser navigate to `localhost:8080`.

@@ -40,25 +58,53 @@ arguments when launching code-server with Docker. See
- For hosting on cloud platforms see [doc/deploy.md](doc/deploy.md).

### Build
- If you also plan on developing, set the `OUT` environment variable. Otherwise
  it will build in this directory which will cause issues because `yarn watch`
  will try to compile the build directory as well.
- For now `@coder/nbin` is a global dependency.
- Run `yarn build ${vscodeVersion} ${codeServerVersion}` in this directory (for
  example: `yarn build 1.36.0 development`).
- If you target the same VS Code version our Travis builds do everything will
  work but if you target some other version it might not (we have to do some
  patching to VS Code so different versions aren't always compatible).
- You can run the built code with `node path/to/build/out/vs/server/main.js` or run
  `yarn binary` with the same arguments in the previous step to package the
  code into a single binary.

See
[VS Code's prerequisites](https://github.com/Microsoft/vscode/wiki/How-to-Contribute#prerequisites)
before building.

```shell
export OUT=/path/to/output/build # Optional if only building. Required if also developing.
yarn build $vscodeVersion $codeServerVersion # See travis.yml for the VS Code version to use.
                                             # The code-server version can be anything you want.
node /path/to/output/build/out/vs/server/main.js # You can run the built JavaScript with Node.
yarn binary $vscodeVersion $codeServerVersion # Or you can package it into a binary.
```

## Security

### Authentication
By default `code-server` enables password authentication using a randomly
generated password. You can set the `PASSWORD` environment variable to use your
own instead or use `--auth none` to disable password authentication.

Do not expose `code-server` to the open internet without some form of
authentication.

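As a small illustration of the options described above (the password value is a placeholder):

```shell
# Use your own password instead of the randomly generated one (placeholder value).
PASSWORD=my-secret code-server
# Or disable password authentication entirely.
code-server --auth none
```
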
### Encrypting traffic with HTTPS
|
||||
If you aren't doing SSL termination elsewhere you can directly give
|
||||
`code-server` a certificate with `code-server --cert` followed by the path to
|
||||
your certificate. Additionally, you can use certificate keys with `--cert-key`
|
||||
followed by the path to your key. If you pass `--cert` without any path
|
||||
`code-server` will generate a self-signed certificate.
|
||||
|
||||
If `code-server` has been passed a certificate it will also respond to HTTPS
|
||||
requests and will redirect all HTTP requests to HTTPS. Otherwise it will respond
|
||||
only to HTTP requests.
|
||||
|
||||
You can use [Let's Encrypt](https://letsencrypt.org/) to get an SSL certificate
|
||||
for free.
|
||||
|
||||
Do not expose `code-server` to the open internet without SSL, whether built-in
|
||||
or through a proxy.
|
||||
|
||||
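For example, the certificate flags described above might be used like this (the paths are placeholders):

```shell
# Serve over HTTPS with an existing certificate and key (placeholder paths).
code-server --cert /path/to/cert.pem --cert-key /path/to/key.pem
# Or pass --cert with no path to have code-server generate a self-signed certificate.
code-server --cert
```
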
## Known Issues
- Uploading .vsix files doesn't work.

- Creating custom VS Code extensions and debugging them doesn't work.
- Extension profiling and tips are currently disabled.

## Future

- **Stay up to date!** Get notified about new releases of code-server.

- Windows support.
@@ -66,26 +112,41 @@ arguments when launching code-server with Docker. See
- Run VS Code unit tests against our builds to ensure features work as expected.

## Extensions
At the moment we can't use the official VS Code Marketplace. We've created a
custom extension marketplace focused around open-sourced extensions. However,
you can manually download the extension to your extensions directory. It's also
possible to set your own marketplace URLs by setting the `SERVICE_URL` and
`ITEM_URL` environment variables.

code-server does not provide access to the official
[Visual Studio Marketplace](https://marketplace.visualstudio.com/vscode). Instead,
Coder has created a custom extension marketplace that we manage for open-source
extensions. If you want to use an extension with code-server that we do not have
in our marketplace please look for a release in the extension’s repository,
contact us to see if we have one in the works or, if you build an extension
locally from open source, you can copy it to the `extensions` folder. If you
build one locally from open-source please contribute it to the project and let
us know so we can give you props! If you have your own custom marketplace, it is
possible to point code-server to it by setting the `SERVICE_URL` and `ITEM_URL`
environment variables.

## Telemetry

Use the `--disable-telemetry` flag to completely disable telemetry. We use the
data collected to improve code-server.

## Contributing

### Development

See
[VS Code's prerequisites](https://github.com/Microsoft/vscode/wiki/How-to-Contribute#prerequisites)
before developing.

```shell
git clone https://github.com/microsoft/vscode
cd vscode
git checkout <see travis.yml for the VS Code version to use here>
git checkout ${vscodeVersion} # See travis.yml for the version to use.
yarn
git clone https://github.com/cdr/code-server src/vs/server
cd src/vs/server
yarn patch:apply
yarn
yarn patch:apply
yarn watch
# Wait for the initial compilation to complete (it will say "Finished compilation").
# Run the next command in another shell.
@@ -94,10 +155,10 @@ yarn start
```

If you run into issues about a different version of Node being used, try running
`npm rebuild` in the VS Code directory and ignore the error at the end from
`vscode-ripgrep`.
`npm rebuild` in the VS Code directory.

### Upgrading VS Code

We patch VS Code to provide and fix some functionality. As the web portion of VS
Code matures, we'll be able to shrink and maybe even entirely eliminate our
patch. In the meantime, however, upgrading the VS Code version requires ensuring
@@ -108,26 +169,27 @@ the patch in the VS Code source, then run `yarn patch:generate` in this
directory.

Our changes include:
- Change the remote schema to `code-server`.

- Allow multiple extension directories (both user and built-in).
- Modify the loader, websocket, webview, service worker, and asset requests to
  use the URL of the page as a base (and TLS if necessary for the websocket).
- Send client-side telemetry through the server and get the initial log level
  from the server.
- Add an upload service for use in editor windows and the explorer along with a
  file prefix to ignore for temporary files created during upload.
- Send client-side telemetry through the server.
- Make changing the display language work.
- Make hiding or toggling the menu bar possible.
- Make it possible for us to load code on the client.
- Modify the build process to include our code.
- Make extensions work in the browser.
- Fix getting permanently disconnected when you sleep or hibernate for a while.
- Make it possible to automatically update the binary.

## License

[MIT](LICENSE)

## Enterprise
Visit [our enterprise page](https://coder.com/enterprise) for more information
about our enterprise offering.

Visit [our enterprise page](https://coder.com) for more information about our
enterprise offering.

## Commercialization

If you would like to commercialize code-server, please contact
contact@coder.com.

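As a sketch of the marketplace environment variables and telemetry flag mentioned in the README above (the URLs are placeholders for a self-hosted marketplace):

```shell
# Point code-server at a custom extension marketplace (placeholder URLs)
# and turn telemetry off, per the Extensions and Telemetry sections.
SERVICE_URL=https://marketplace.example.com/api \
ITEM_URL=https://marketplace.example.com/item \
code-server --disable-telemetry
```
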
doc/assets/droplet.svg (new file, 24 lines, 2.7 KiB)
@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="200px" height="40px" viewBox="0 0 200 40" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
  <!-- Generator: Sketch 52.5 (67469) - http://www.bohemiancoding.com/sketch -->
  <title>do-btn-blue-ghost</title>
  <desc>Created with Sketch.</desc>
  <g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
    <g id="Partner-welcome-kit-Copy-3" transform="translate(-651.000000, -828.000000)">
      <g id="do-btn-blue-ghost" transform="translate(651.000000, 828.000000)">
        <rect id="Rectangle-Copy-4" stroke="#0069FF" x="0.5" y="0.5" width="199" height="39" rx="6"></rect>
        <path d="M6,0 L47,0 L47,40 L6,40 C2.6862915,40 4.05812251e-16,37.3137085 0,34 L-8.8817842e-16,6 C-1.29399067e-15,2.6862915 2.6862915,6.08718376e-16 6,0 Z" id="Rectangle-Copy-5" fill="#0069FF"></path>
        <g id="DO_Logo_horizontal_blue-Copy-3" transform="translate(13.000000, 10.000000)" fill="#FFFFFF">
          <path d="M10.0098493,20 L10.0098493,16.1262429 C14.12457,16.1262429 17.2897398,12.0548452 15.7269372,7.74627862 C15.1334679,6.14538921 13.8674,4.86072487 12.2650328,4.28756693 C7.952489,2.72620566 3.87733294,5.88845634 3.87733294,9.99938223 C3.87733294,9.99938223 3.87733294,9.99938223 3.87733294,9.99938223 L0,9.99938223 C0,3.45747613 6.3303395,-1.64165309 13.1948014,0.492866119 C16.2017127,1.42177726 18.57559,3.81322933 19.5053586,6.79760341 C21.6418482,13.6754986 16.5577943,20 10.0098493,20 Z" id="XMLID_49_"></path>
          <polygon id="XMLID_47_" points="9.52380952 16.1904762 5.71428571 16.1904762 5.71428571 12.3809524 5.71428571 12.3809524 9.52380952 12.3809524 9.52380952 12.3809524"></polygon>
          <polygon id="XMLID_46_" points="6.66666667 19.047619 3.80952381 19.047619 3.80952381 19.047619 3.80952381 16.1904762 6.66666667 16.1904762"></polygon>
          <polygon id="XMLID_45_" points="3.80952381 16.1904762 0.952380952 16.1904762 0.952380952 16.1904762 0.952380952 13.3333333 0.952380952 13.3333333 3.80952381 13.3333333 3.80952381 13.3333333"></polygon>
        </g>
        <!-- Modified to add GitHub font-family after DigitalOcean's font-family, otherwise it looks bad on GitHub -->
        <text id="Create-a-Droplet-Copy-3" font-family="Sailec-Medium, Sailec, -apple-system, BlinkMacSystemFont, Segoe UI, Helvetica, Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol" font-size="16" font-weight="400" fill="#0069FF">
          <tspan x="58" y="26">Create a Droplet</tspan>
        </text>
      </g>
    </g>
  </g>
</svg>

@@ -2,11 +2,11 @@

[Definition]

failregex = ^INFO\s+Failed login attempt\s+{\"password\":\"(\\.|[^"])*\",\"remoteAddress\":\"<HOST>\"
failregex = ^Failed login attempt\s+{\"remoteAddress\":\"<HOST>\"

# Use this instead for proxies (ensure the proxy is configured to send the
# X-Forwarded-For header).
# failregex = ^INFO\s+Failed login attempt\s+{\"password\":\"(\\.|[^"])*\",\"xForwardedFor\":\"<HOST>\"
# failregex = ^Failed login attempt\s+{\"xForwardedFor\":\"<HOST>\"

ignoreregex =

@@ -58,7 +58,7 @@ spec:
      app: code-server
    spec:
      containers:
      - image: codercom/code-server
      - image: codercom/code-server:v2
        imagePullPolicy: Always
        name: code-servery
        ports:

@@ -35,7 +35,7 @@ spec:
      app: code-server
    spec:
      containers:
      - image: codercom/code-server
      - image: codercom/code-server:v2
        imagePullPolicy: Always
        name: code-server
        ports:

@@ -30,6 +30,6 @@ accessible from the internet (use localhost or block it in your firewall).
## Fail2Ban
Fail2Ban allows for automatically banning and logging repeated failed
authentication attempts for many applications through regex filters. A working
filter for code-server can be found in `./code-server.fail2ban.conf`. Once this
filter for code-server can be found in `./examples/fail2ban.conf`. Once this
is installed and configured correctly, repeated failed login attempts should
automatically be banned from connecting to your server.

@@ -7,15 +7,6 @@
## Usage
Run `code-server --help` to view available options.

### Encrypting traffic with HTTPS
To encrypt the traffic between the browser and server use `code-server --cert`
followed by the path to your certificate. Additionally, you can use certificate
keys with `--cert-key` followed by the path to your key. If you pass `--cert`
without any path code-server will generate a self-signed certificate.

You can use [Let's Encrypt](https://letsencrypt.org/) to get an SSL certificate
for free.

### Nginx Reverse Proxy
The trailing slashes are important.

@@ -46,12 +37,62 @@ server {
  RewriteRule /(.*) http://localhost:8080/$1 [P,L]

  ProxyRequests off

  RequestHeader set X-Forwarded-Proto https
  RequestHeader set X-Forwarded-Port 443

  ProxyPass / http://localhost:8080/ nocanon
  ProxyPass / http://localhost:8080/ nocanon
  ProxyPassReverse / http://localhost:8080/

</VirtualHost>
```

### Run automatically at startup

In some cases you might need to run code-server automatically once the host starts. You may use your local init service to do so.

#### Systemd

```ini
[Unit]
Description=Code Server IDE
After=network.target

[Service]
Type=simple
User=<USER>
EnvironmentFile=$HOME/.profile
WorkingDirectory=$HOME
Restart=on-failure
RestartSec=10

ExecStart=<PATH TO BINARY> $(pwd)

StandardOutput=file:/var/log/code-server-output.log
StandardError=file:/var/log/code-server-error.log

[Install]
WantedBy=multi-user.target
```

#### OpenRC

```sh
#!/sbin/openrc-run

depend() {
  after net-online
  need net
}

supervisor=supervise-daemon
name="code-server"
command="/opt/cdr/code-server"
command_args=""

pidfile="/var/run/cdr.pid"
respawn_delay=5

set -o allexport
if [ -f /etc/environment ]; then source /etc/environment; fi
set +o allexport
```

#### Kubernetes/Docker

Make sure you set your restart policy to always - this will ensure your container starts as the daemon starts.

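Assuming the systemd unit above were saved as code-server.service (the file name is an assumption, not part of this change), enabling it at boot would look roughly like this:

```shell
# Install and start the unit sketched above; the unit file name is hypothetical.
sudo cp code-server.service /etc/systemd/system/code-server.service
sudo systemctl daemon-reload
sudo systemctl enable --now code-server
```
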
docker-compose.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
version: "3"

services:
  code-server:
    container_name: code-server
    image: codercom/code-server
    ports:
      - "8080:8080"
    volumes:
      - "${PWD}:/home/coder/project"
      - "${HOME}/.local/share/code-server:/home/coder/.local/share/code-server"
    environment:
      PASSWORD: ${PASSWORD}

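One possible way to run the compose file above; the password value is a placeholder passed through to the `PASSWORD` variable it references:

```shell
# Bring up the service defined in docker-compose.yml (placeholder password).
PASSWORD=my-secret docker-compose up -d
# Follow the container's logs to see startup output.
docker-compose logs -f code-server
```
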
main.js (2 changed lines)
@@ -4,4 +4,4 @@
// while still allowing us to access files within the binary.
process.env.NBIN_BYPASS = true;

require("../../bootstrap-amd").load("vs/server/src/cli");
require("../../bootstrap-amd").load("vs/server/src/node/cli");

package.json (36 changed lines)
@@ -1,36 +1,42 @@
{
  "license": "MIT",
  "scripts": {
    "ensure-in-vscode": "bash ./scripts/tasks.bash ensure-in-vscode",
    "preinstall": "yarn ensure-in-vscode && cd ../../../ && yarn || true",
    "postinstall": "rm -rf node_modules/@types/node",
    "start": "yarn ensure-in-vscode && nodemon --watch ../../../out --verbose ../../../out/vs/server/main.js",
    "watch": "yarn ensure-in-vscode && cd ../../../ && yarn watch",
    "build": "bash ./scripts/tasks.bash build",
    "package": "bash ./scripts/tasks.bash package",
    "package-prebuilt": "bash ./scripts/tasks.bash package-prebuilt",
    "binary": "bash ./scripts/tasks.bash binary",
    "patch:generate": "yarn ensure-in-vscode && cd ../../../ && git diff --staged > ./src/vs/server/scripts/vscode.patch",
    "patch:apply": "yarn ensure-in-vscode && cd ../../../ && git apply ./src/vs/server/scripts/vscode.patch"
    "runner": "cd ./scripts && node --max-old-space-size=32384 -r ts-node/register ./build.ts",
    "start": "nodemon --watch ../../../out --verbose ../../../out/vs/server/main.js",
    "test": "./scripts/test.sh",
    "watch": "cd ../../../ && yarn watch",
    "build": "yarn && yarn runner build",
    "package": "yarn runner package",
    "binary": "yarn runner binary",
    "patch:generate": "cd ../../../ && git diff --staged > ./src/vs/server/scripts/vscode.patch",
    "patch:apply": "cd ../../../ && git apply ./src/vs/server/scripts/vscode.patch"
  },
  "devDependencies": {
    "@coder/nbin": "^1.2.0",
    "@coder/nbin": "^1.2.7",
    "@types/fs-extra": "^8.0.1",
    "@types/node": "^10.12.12",
    "@types/pem": "^1.9.5",
    "@types/safe-compare": "^1.1.0",
    "@types/tar-fs": "^1.16.1",
    "@types/tar-stream": "^1.6.1",
    "nodemon": "^1.19.1"
    "fs-extra": "^8.1.0",
    "nodemon": "^1.19.1",
    "ts-node": "^8.4.1",
    "typescript": "3.6"
  },
  "resolutions": {
    "@types/node": "^10.12.12",
    "safe-buffer": "^5.1.1"
  },
  "dependencies": {
    "@coder/logger": "^1.1.8",
    "@coder/logger": "^1.1.12",
    "@coder/node-browser": "^1.0.6",
    "@coder/requirefs": "^1.0.6",
    "httpolyglot": "^0.1.2",
    "pem": "^1.14.2",
    "safe-compare": "^1.1.4",
    "tar-fs": "^2.0.0",
    "tar-stream": "^2.1.0"
    "tar-stream": "^2.1.0",
    "util": "^0.12.1"
  }
}

@@ -1,58 +0,0 @@
// This builds the package and product JSON files for the final build.
const crypto = require("crypto");
const fs = require("fs");
const path = require("path");
const rootPath = path.resolve(__dirname, "..");
const sourcePath = process.argv[2];
const buildPath = process.argv[3];
const vscodeVersion = process.argv[4];
const codeServerVersion = process.argv[5];
const util = require(path.join(sourcePath, "build/lib/util"));

function computeChecksum(filename) {
  return crypto.createHash("md5").update(fs.readFileSync(filename))
    .digest("base64").replace(/=+$/, "");
}

const computeChecksums = (filenames) => {
  const result = {};
  filenames.forEach(function (filename) {
    result[filename] = computeChecksum(path.join(buildPath, "out", filename));
  });
  return result;
};

const mergeAndWrite = (name, json = {}) => {
  const aJson = JSON.parse(fs.readFileSync(path.join(sourcePath, `${name}.json`)));
  const bJson = JSON.parse(fs.readFileSync(path.join(rootPath, "scripts", `${name}.json`)));

  delete aJson.scripts;
  delete aJson.dependencies;
  delete aJson.devDependencies;
  delete aJson.optionalDependencies;

  fs.writeFileSync(path.join(buildPath, `${name}.json`), JSON.stringify({
    ...aJson,
    ...bJson,
    ...json,
  }, null, 2));
};


const writeProduct = () => {
  const checksums = computeChecksums([
    "vs/workbench/workbench.web.api.js",
    "vs/workbench/workbench.web.api.css",
    "vs/code/browser/workbench/workbench.html",
    "vs/code/browser/workbench/workbench.js",
    "vs/server/src/cli.js",
    "vs/server/src/uriTransformer.js",
    "vs/server/src/login/index.html"
  ]);
  const date = new Date().toISOString();
  const commit = util.getVersion(rootPath);
  mergeAndWrite("product", { commit, date, checksums });
  mergeAndWrite("package", { codeServerVersion: `${codeServerVersion}-vsc${vscodeVersion}` });
};

writeProduct();

391
scripts/build.ts
Normal file
391
scripts/build.ts
Normal file
@@ -0,0 +1,391 @@
|
||||
import { Binary } from "@coder/nbin";
|
||||
import * as cp from "child_process";
|
||||
// import * as crypto from "crypto";
|
||||
import * as fs from "fs-extra";
|
||||
import * as os from "os";
|
||||
import * as path from "path";
|
||||
import * as util from "util";
|
||||
|
||||
enum Task {
|
||||
/**
|
||||
* Use before running anything that only works inside VS Code.
|
||||
*/
|
||||
EnsureInVscode = "ensure-in-vscode",
|
||||
Binary = "binary",
|
||||
Package = "package",
|
||||
Build = "build",
|
||||
}
|
||||
|
||||
class Builder {
|
||||
private readonly rootPath = path.resolve(__dirname, "..");
|
||||
private readonly outPath = process.env.OUT || this.rootPath;
|
||||
private _target?: "darwin" | "alpine" | "linux";
|
||||
private currentTask?: Task;
|
||||
|
||||
public run(task: Task | undefined, args: string[]): void {
|
||||
this.currentTask = task;
|
||||
this.doRun(task, args).catch((error) => {
|
||||
console.error(error.message);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
private async task<T>(message: string, fn: () => Promise<T>): Promise<T> {
|
||||
const time = Date.now();
|
||||
this.log(`${message}...`, true);
|
||||
try {
|
||||
const t = await fn();
|
||||
process.stdout.write(`took ${Date.now() - time}ms\n`);
|
||||
return t;
|
||||
} catch (error) {
|
||||
process.stdout.write("failed\n");
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes to stdout with an optional newline.
|
||||
*/
|
||||
private log(message: string, skipNewline: boolean = false): void {
|
||||
process.stdout.write(`[${this.currentTask || "default"}] ${message}`);
|
||||
if (!skipNewline) {
|
||||
process.stdout.write("\n");
|
||||
}
|
||||
}
|
||||
|
||||
private async doRun(task: Task | undefined, args: string[]): Promise<void> {
|
||||
if (!task) {
|
||||
throw new Error("No task provided");
|
||||
}
|
||||
|
||||
if (task === Task.EnsureInVscode) {
|
||||
return process.exit(this.isInVscode(this.rootPath) ? 0 : 1);
|
||||
}
|
||||
|
||||
// If we're inside VS Code assume we want to develop. In that case we should
|
||||
// set an OUT directory and not build in this directory, otherwise when you
|
||||
// build/watch VS Code the build directory will be included.
|
||||
if (this.isInVscode(this.outPath)) {
|
||||
throw new Error("Should not build inside VS Code; set the OUT environment variable");
|
||||
}
|
||||
|
||||
this.ensureArgument("rootPath", this.rootPath);
|
||||
this.ensureArgument("outPath", this.outPath);
|
||||
|
||||
const arch = this.ensureArgument("arch", os.arch().replace(/^x/, "x86_"));
|
||||
const target = this.ensureArgument("target", await this.target());
|
||||
const vscodeVersion = this.ensureArgument("vscodeVersion", args[0]);
|
||||
const codeServerVersion = this.ensureArgument("codeServerVersion", args[1]);
|
||||
|
||||
const vscodeSourcePath = path.join(this.outPath, "source", `vscode-${vscodeVersion}-source`);
|
||||
const binariesPath = path.join(this.outPath, "binaries");
|
||||
const binaryName = `code-server${codeServerVersion}-vsc${vscodeVersion}-${target}-${arch}`;
|
||||
const finalBuildPath = path.join(this.outPath, "build", `${binaryName}-built`);
|
||||
|
||||
switch (task) {
|
||||
case Task.Binary:
|
||||
return this.binary(finalBuildPath, binariesPath, binaryName);
|
||||
case Task.Package:
|
||||
return this.package(vscodeSourcePath, binariesPath, binaryName);
|
||||
case Task.Build:
|
||||
return this.build(vscodeSourcePath, vscodeVersion, codeServerVersion, finalBuildPath);
|
||||
default:
|
||||
throw new Error(`No task matching "${task}"`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the target of the system.
|
||||
*/
|
||||
private async target(): Promise<"darwin" | "alpine" | "linux"> {
|
||||
if (!this._target) {
|
||||
if (os.platform() === "darwin" || (process.env.OSTYPE && /^darwin/.test(process.env.OSTYPE))) {
|
||||
this._target = "darwin";
|
||||
} else {
|
||||
// Alpine's ldd doesn't have a version flag but if you use an invalid flag
|
||||
// (like --version) it outputs the version to stderr and exits with 1.
|
||||
const result = await util.promisify(cp.exec)("ldd --version")
|
||||
.catch((error) => ({ stderr: error.message, stdout: "" }));
|
||||
if (/musl/.test(result.stderr) || /musl/.test(result.stdout)) {
|
||||
this._target = "alpine";
|
||||
} else {
|
||||
this._target = "linux";
|
||||
}
|
||||
}
|
||||
}
|
||||
return this._target;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make sure the argument is set. Display the value if it is.
|
||||
*/
|
||||
private ensureArgument(name: string, arg?: string): string {
|
||||
if (!arg) {
|
||||
this.log(`${name} is missing`);
|
||||
throw new Error("Usage: <vscodeVersion> <codeServerVersion>");
|
||||
}
|
||||
this.log(`${name} is "${arg}"`);
|
||||
return arg;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return true if it looks like we're inside VS Code. This is used to prevent
|
||||
* accidentally building inside VS Code while developing which causes issues
|
||||
* because the watcher will try compiling those built files.
|
||||
*/
|
||||
private isInVscode(pathToCheck: string): boolean {
|
||||
let inside = false;
|
||||
const maybeVsCode = path.join(pathToCheck, "../../../");
|
||||
try {
|
||||
// If it has a package.json with the right name it's probably VS Code.
|
||||
inside = require(path.join(maybeVsCode, "package.json")).name === "code-oss-dev";
|
||||
} catch (error) {}
|
||||
this.log(
|
||||
inside
|
||||
? `Running inside VS Code ([${maybeVsCode}]${path.relative(maybeVsCode, pathToCheck)})`
|
||||
: "Not running inside VS Code"
|
||||
);
|
||||
return inside;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build code-server within VS Code.
|
||||
*/
|
||||
private async build(vscodeSourcePath: string, vscodeVersion: string, codeServerVersion: string, finalBuildPath: string): Promise<void> {
|
||||
// Install dependencies (should be cached by CI).
|
||||
await this.task("Installing code-server dependencies", async () => {
|
||||
await util.promisify(cp.exec)("yarn", { cwd: this.rootPath });
|
||||
});
|
||||
|
||||
// Download and prepare VS Code if necessary (should be cached by CI).
|
||||
if (fs.existsSync(vscodeSourcePath)) {
|
||||
this.log("Using existing VS Code clone");
|
||||
} else {
|
||||
await this.task("Cloning VS Code", () => {
|
||||
return util.promisify(cp.exec)(
|
||||
"git clone https://github.com/microsoft/vscode"
|
||||
+ ` --quiet --branch "${vscodeVersion}"`
|
||||
+ ` --single-branch --depth=1 "${vscodeSourcePath}"`);
|
||||
});
|
||||
}
|
||||
|
||||
await this.task("Installing VS Code dependencies", () => {
|
||||
return util.promisify(cp.exec)("yarn", { cwd: vscodeSourcePath });
|
||||
});
|
||||
|
||||
if (fs.existsSync(path.join(vscodeSourcePath, ".build/extensions"))) {
|
||||
this.log("Using existing built-in-extensions");
|
||||
} else {
|
||||
await this.task("Building default extensions", () => {
|
||||
return util.promisify(cp.exec)(
|
||||
"yarn gulp compile-extensions-build --max-old-space-size=32384",
|
||||
{ cwd: vscodeSourcePath },
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// Clean before patching or it could fail if already patched.
|
||||
await this.task("Patching VS Code", async () => {
|
||||
await util.promisify(cp.exec)("git reset --hard", { cwd: vscodeSourcePath });
|
||||
await util.promisify(cp.exec)("git clean -fd", { cwd: vscodeSourcePath });
|
||||
await util.promisify(cp.exec)(`git apply ${this.rootPath}/scripts/vscode.patch`, { cwd: vscodeSourcePath });
|
||||
});
|
||||
|
||||
const serverPath = path.join(vscodeSourcePath, "src/vs/server");
|
||||
await this.task("Copying code-server into VS Code", async () => {
|
||||
await fs.remove(serverPath);
|
||||
await fs.mkdirp(serverPath);
|
||||
await Promise.all(["main.js", "node_modules", "src", "typings"].map((fileName) => {
|
||||
return fs.copy(path.join(this.rootPath, fileName), path.join(serverPath, fileName));
|
||||
}));
|
||||
});
|
||||
|
||||
await this.task("Building VS Code", () => {
|
||||
return util.promisify(cp.exec)("yarn gulp compile-build --max-old-space-size=32384", { cwd: vscodeSourcePath });
|
||||
});
|
||||
|
||||
await this.task("Optimizing VS Code", async () => {
|
||||
await fs.copyFile(path.join(this.rootPath, "scripts/optimize.js"), path.join(vscodeSourcePath, "coder.js"));
|
||||
await util.promisify(cp.exec)(`yarn gulp optimize --max-old-space-size=32384 --gulpfile ./coder.js`, { cwd: vscodeSourcePath });
|
||||
});
|
||||
|
||||
const { productJson, packageJson } = await this.task("Generating final package.json and product.json", async () => {
|
||||
const merge = async (name: string, extraJson: { [key: string]: string } = {}): Promise<{ [key: string]: string }> => {
|
||||
const [aJson, bJson] = (await Promise.all([
|
||||
fs.readFile(path.join(vscodeSourcePath, `${name}.json`), "utf8"),
|
||||
fs.readFile(path.join(this.rootPath, `scripts/${name}.json`), "utf8"),
|
||||
])).map((raw) => {
|
||||
const json = JSON.parse(raw);
|
||||
delete json.scripts;
|
||||
delete json.dependencies;
|
||||
delete json.devDependencies;
|
||||
delete json.optionalDependencies;
|
||||
return json;
|
||||
});
|
||||
|
||||
return { ...aJson, ...bJson, ...extraJson };
|
||||
};
|
||||
|
||||
const date = new Date().toISOString();
|
||||
const commit = require(path.join(vscodeSourcePath, "build/lib/util")).getVersion(this.rootPath);
|
||||
|
||||
const [productJson, packageJson] = await Promise.all([
|
||||
merge("product", { commit, date }),
|
||||
merge("package", { codeServerVersion: `${codeServerVersion}-vsc${vscodeVersion}` }),
|
||||
]);
|
||||
|
||||
// We could do this before the optimization but then it'd be copied into
|
||||
// three files and unused in two which seems like a waste of bytes.
|
||||
const apiPath = path.join(vscodeSourcePath, "out-vscode/vs/workbench/workbench.web.api.js");
|
||||
await fs.writeFile(apiPath, (await fs.readFile(apiPath, "utf8")).replace('{ /*BUILD->INSERT_PRODUCT_CONFIGURATION*/}', JSON.stringify({
|
||||
version: packageJson.version,
|
||||
codeServerVersion: packageJson.codeServerVersion,
|
||||
...productJson,
|
||||
})));
|
||||
|
||||
return { productJson, packageJson };
|
||||
});
|
||||
|
||||
if (process.env.MINIFY) {
|
||||
await this.task("Minifying VS Code", () => {
|
||||
return util.promisify(cp.exec)("yarn gulp minify --max-old-space-size=32384 --gulpfile ./coder.js", { cwd: vscodeSourcePath });
|
||||
});
|
||||
}
|
||||
|
||||
const finalServerPath = path.join(finalBuildPath, "out/vs/server");
|
||||
await this.task("Copying into final build directory", async () => {
|
||||
await fs.remove(finalBuildPath);
|
||||
await fs.mkdirp(finalBuildPath);
|
||||
await Promise.all([
|
||||
fs.copy(path.join(vscodeSourcePath, "remote/node_modules"), path.join(finalBuildPath, "node_modules")),
|
||||
fs.copy(path.join(vscodeSourcePath, ".build/extensions"), path.join(finalBuildPath, "extensions")),
|
||||
fs.copy(path.join(vscodeSourcePath, `out-vscode${process.env.MINIFY ? "-min" : ""}`), path.join(finalBuildPath, "out")).then(() => {
|
||||
return Promise.all([
|
||||
fs.remove(path.join(finalServerPath, "node_modules")).then(() => {
|
||||
return fs.copy(path.join(serverPath, "node_modules"), path.join(finalServerPath, "node_modules"));
|
||||
}),
|
||||
fs.copy(path.join(finalServerPath, "src/browser/workbench-build.html"), path.join(finalServerPath, "src/browser/workbench.html")),
|
||||
]);
|
||||
}),
|
||||
]);
|
||||
});
|
||||
|
||||
if (process.env.MINIFY) {
|
||||
await this.task("Restricting to production dependencies", async () => {
|
||||
await Promise.all(["package.json", "yarn.lock"].map((fileName) => {
|
||||
Promise.all([
|
||||
fs.copy(path.join(this.rootPath, fileName), path.join(finalServerPath, fileName)),
|
||||
fs.copy(path.join(path.join(vscodeSourcePath, "remote"), fileName), path.join(finalBuildPath, fileName)),
|
||||
]);
|
||||
}));
|
||||
|
||||
await Promise.all([finalServerPath, finalBuildPath].map((cwd) => {
|
||||
return util.promisify(cp.exec)("yarn --production", { cwd });
|
||||
}));
|
||||
|
||||
await Promise.all(["package.json", "yarn.lock"].map((fileName) => {
|
||||
return Promise.all([
|
||||
fs.remove(path.join(finalServerPath, fileName)),
|
||||
fs.remove(path.join(finalBuildPath, fileName)),
|
||||
]);
|
||||
}));
|
||||
});
|
||||
}
|
||||
|
||||
await this.task("Writing final package.json and product.json", () => {
|
||||
return Promise.all([
|
||||
fs.writeFile(path.join(finalBuildPath, "package.json"), JSON.stringify(packageJson, null, 2)),
|
||||
fs.writeFile(path.join(finalBuildPath, "product.json"), JSON.stringify(productJson, null, 2)),
|
||||
]);
|
||||
});
|
||||
|
||||
// Prevent needless cache changes.
|
||||
await this.task("Cleaning for smaller cache", () => {
|
||||
      return Promise.all([
        fs.remove(serverPath),
        fs.remove(path.join(vscodeSourcePath, "out-vscode")),
        fs.remove(path.join(vscodeSourcePath, "out-vscode-min")),
        fs.remove(path.join(vscodeSourcePath, "out-build")),
        util.promisify(cp.exec)("git reset --hard", { cwd: vscodeSourcePath }).then(() => {
          return util.promisify(cp.exec)("git clean -fd", { cwd: vscodeSourcePath });
        }),
      ]);
    });

    // Prepend code to the target which enables finding files within the binary.
    const prependLoader = async (relativeFilePath: string): Promise<void> => {
      const filePath = path.join(finalBuildPath, relativeFilePath);
      const shim = `
        if (!global.NBIN_LOADED) {
          try {
            const nbin = require("nbin");
            nbin.shimNativeFs("${finalBuildPath}");
            global.NBIN_LOADED = true;
            const path = require("path");
            const rg = require("vscode-ripgrep");
            rg.binaryRgPath = rg.rgPath;
            rg.rgPath = path.join(require("os").tmpdir(), "code-server", path.basename(rg.binaryRgPath));
          } catch (error) { /* Not in the binary. */ }
        }
      `;
      await fs.writeFile(filePath, shim + (await fs.readFile(filePath, "utf8")));
    };

    await this.task("Prepending nbin loader", () => {
      return Promise.all([
        prependLoader("out/vs/server/main.js"),
        prependLoader("out/bootstrap-fork.js"),
        prependLoader("extensions/node_modules/typescript/lib/tsserver.js"),
      ]);
    });

    this.log(`Final build: ${finalBuildPath}`);
  }

  /**
   * Bundles the built code into a binary.
   */
  private async binary(targetPath: string, binariesPath: string, binaryName: string): Promise<void> {
    const bin = new Binary({
      mainFile: path.join(targetPath, "out/vs/server/main.js"),
      target: await this.target(),
    });

    bin.writeFiles(path.join(targetPath, "**"));

    await fs.mkdirp(binariesPath);

    const binaryPath = path.join(binariesPath, binaryName);
    await fs.writeFile(binaryPath, await bin.build());
    await fs.chmod(binaryPath, "755");

    this.log(`Binary: ${binaryPath}`);
  }

  /**
   * Package the binary into a release archive.
   */
  private async package(vscodeSourcePath: string, binariesPath: string, binaryName: string): Promise<void> {
    const releasePath = path.join(this.outPath, "release");
    const archivePath = path.join(releasePath, binaryName);

    await fs.remove(archivePath);
    await fs.mkdirp(archivePath);

    await fs.copyFile(path.join(binariesPath, binaryName), path.join(archivePath, "code-server"));
    await fs.copyFile(path.join(this.rootPath, "README.md"), path.join(archivePath, "README.md"));
    await fs.copyFile(path.join(vscodeSourcePath, "LICENSE.txt"), path.join(archivePath, "LICENSE.txt"));
    await fs.copyFile(path.join(vscodeSourcePath, "ThirdPartyNotices.txt"), path.join(archivePath, "ThirdPartyNotices.txt"));

    if ((await this.target()) === "darwin") {
      await util.promisify(cp.exec)(`zip -r "${binaryName}.zip" "${binaryName}"`, { cwd: releasePath });
      this.log(`Archive: ${archivePath}.zip`);
    } else {
      await util.promisify(cp.exec)(`tar -czf "${binaryName}.tar.gz" "${binaryName}"`, { cwd: releasePath });
      this.log(`Archive: ${archivePath}.tar.gz`);
    }
  }
}

const builder = new Builder();
builder.run(process.argv[2] as Task, process.argv.slice(3));
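Editor's aside (not part of the diff): a hedged sketch of how this entry point appears to be driven. scripts/ci.bash (further down in this diff) runs the equivalent of "yarn <task> <vscodeVersion> <codeServerVersion>" for the build, binary, and package tasks, which presumably reaches this builder through the package.json scripts; the literal values below are examples only.

// Illustrative only -- real values come from the command line, not this snippet.
// For instance, "yarn build 1.41.1 daily" would reach this file roughly as:
const exampleTask = "build" as Task;      // later runs use "binary" and "package"
const exampleArgs = ["1.41.1", "daily"];  // VS Code version, then code-server version
new Builder().run(exampleTask, exampleArgs);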
53
scripts/cacher.sh
Executable file
@@ -0,0 +1,53 @@
#!/usr/bin/env sh
# cacher.sh -- Restore and rebuild cache.
# Cache paths are designed to work with multi-arch builds and are organized
# based on the branch or tag. The master branch cache is used as a fallback.
# This will download and package the cache but it will not upload it.

set -eu

# Try restoring from each argument in turn until we get something.
restore() {
  for branch in "$@" ; do
    if [ -n "$branch" ] ; then
      cache_path="https://codesrv-ci.cdr.sh/cache/$branch/$tar.tar.gz"
      if wget "$cache_path" ; then
        tar xzvf "$tar.tar.gz"
        break
      fi
    fi
  done
}

# We need to cache the built-in extensions and Node modules. Everything inside
# the cache-upload directory will be uploaded as-is to the code-server bucket.
package() {
  mkdir -p "cache-upload/cache/$1"
  tar czfv "cache-upload/cache/$1/$tar.tar.gz" node_modules source yarn-cache
}

main() {
  cd "$(dirname "$0")/.."

  # Get the branch for this build.
  branch=${DRONE_BRANCH:-${DRONE_SOURCE_BRANCH:-${DRONE_TAG:-}}}

  # The cache will be named based on the arch, platform, and libc.
  arch=$DRONE_STAGE_ARCH
  platform=${PLATFORM:-linux}
  case $DRONE_STAGE_NAME in
    *alpine*) libc=musl ;;
    * ) libc=glibc ;;
  esac

  tar="$platform-$arch-$libc"

  # The action is determined by the name of the step.
  case $DRONE_STEP_NAME in
    *restore*) restore "$branch" "$DRONE_REPO_BRANCH" ;;
    *rebuild*|*package*) package "$branch" ;;
    *) exit 1 ;;
  esac
}

main "$@"
105
scripts/ci.bash
@@ -1,83 +1,48 @@
#!/bin/bash
#!/usr/bin/env bash
# ci.bash -- Build code-server in the CI.

set -euo pipefail

# Build using a Docker container.
function docker-build() {
  local target="${TARGET:-}"
  local image="codercom/nbin-${target}"
  local token="${GITHUB_TOKEN:-}"
  local minify="${MINIFY:-}"
  if [[ "${target}" == "linux" ]] ; then
    image="codercom/nbin-centos"
  fi

  local containerId
  containerId=$(docker create --network=host --rm -it -v "$(pwd)"/.cache:/src/.cache "${image}")
  docker start "${containerId}"
  docker exec "${containerId}" mkdir -p /src

  # TODO: temporary as long as we are rebuilding modules.
  if [[ "${image}" == "codercom/nbin-alpine" ]] ; then
    docker exec "${containerId}" apk add libxkbfile-dev libsecret-dev
  else
    # TODO: at some point git existed but it seems to have disappeared.
    docker exec "${containerId}" yum install -y libxkbfile-devel libsecret-devel git
  fi

  function docker-exec() {
    local command="${1}" ; shift
    local args="'${vscodeVersion}' '${codeServerVersion}'"
    docker exec "${containerId}" \
      bash -c "cd /src && CI=true GITHUB_TOKEN=${token} MINIFY=${minify} yarn ${command} ${args}"
  }

  docker cp ./. "${containerId}":/src
  docker-exec build
  if [[ -n "${package}" ]] ; then
    docker-exec binary
    docker-exec package
    mkdir -p release
    docker cp "${containerId}":/src/release/. ./release/
  fi

  docker stop "${containerId}"
}

# Build locally.
function local-build() {
  function local-exec() {
    local command="${1}" ; shift
    CI=true yarn "${command}" "${vscodeVersion}" "${codeServerVersion}"
  }

  local-exec build
  if [[ -n "${package}" ]] ; then
    local-exec binary
    local-exec package
  fi
}

# Build code-server in the CI.
function main() {
  cd "$(dirname "${0}")/.."

  local codeServerVersion="${VERSION:-}"
  local vscodeVersion="${VSCODE_VERSION:-}"
  local ostype="${OSTYPE:-}"
  local package="${PACKAGE:-}"
  # Get the version information. If a specific version wasn't set, generate it
  # from the tag and VS Code version.
  local vscode_version=${VSCODE_VERSION:-1.41.1}
  local code_server_version=${VERSION:-${TRAVIS_TAG:-${DRONE_TAG:-daily}}}

  if [[ -z "${codeServerVersion}" ]] ; then
    >&2 echo "Must set VERSION environment variable"; exit 1
  # Remove everything that isn't the current VS Code source for caching
  # (otherwise the cache will contain old versions).
  if [[ -d "source/vscode-$vscode_version-source" ]] ; then
    mv "source/vscode-$vscode_version-source" "vscode-$vscode_version-source"
  fi
  rm -rf source/vscode-*-source
  if [[ -d "vscode-$vscode_version-source" ]] ; then
    mv "vscode-$vscode_version-source" "source/vscode-$vscode_version-source"
  fi

  if [[ -z "${vscodeVersion}" ]] ; then
    >&2 echo "Must set VSCODE_VERSION environment variable"; exit 1
  YARN_CACHE_FOLDER="$(pwd)/yarn-cache"
  export YARN_CACHE_FOLDER

  # Always minify and package on tags since that's when releases are pushed.
  if [[ -n ${DRONE_TAG:-} || -n ${TRAVIS_TAG:-} ]] ; then
    export MINIFY="true"
    export PACKAGE="true"
  fi

  if [[ "${ostype}" == "darwin"* ]]; then
    local-build
  else
    docker-build
  function run-yarn() {
    yarn "$1" "$vscode_version" "$code_server_version"
  }

  run-yarn build
  run-yarn binary
  if [[ -n ${PACKAGE:-} ]] ; then
    run-yarn package
  fi

  # In this case provide a plainly named "code-server" binary.
  if [[ -n ${BINARY:-} ]] ; then
    mv binaries/code-server*-vsc* binaries/code-server
  fi
}
39
scripts/ci.dockerfile
Normal file
@@ -0,0 +1,39 @@
# We deploy with Ubuntu so that devs have a familiar environment.
FROM ubuntu:18.04

RUN apt-get update && apt-get install -y \
  openssl \
  net-tools \
  git \
  locales \
  sudo \
  dumb-init \
  vim \
  curl \
  wget

RUN locale-gen en_US.UTF-8
# We cannot use update-locale because docker will not use the env variables
# configured in /etc/default/locale so we need to set it manually.
ENV LC_ALL=en_US.UTF-8 \
  SHELL=/bin/bash

RUN adduser --gecos '' --disabled-password coder && \
  echo "coder ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers.d/nopasswd

USER coder
# Create first so these directories will be owned by coder instead of root
# (workdir and mounting appear to both default to root).
RUN mkdir -p /home/coder/project
RUN mkdir -p /home/coder/.local/share/code-server

WORKDIR /home/coder/project

# This ensures we have a volume mounted even if the user forgot to do bind
# mount. So that they do not lose their data if they delete the container.
VOLUME [ "/home/coder/project" ]

COPY ./binaries/code-server* /usr/local/bin/code-server
EXPOSE 8080

ENTRYPOINT ["dumb-init", "code-server", "--host", "0.0.0.0"]
@@ -1,16 +0,0 @@
// This file is prepended to loader/entry code (like our main.js or VS Code's
// bootstrap-fork.js). {{ROOT_PATH}} is replaced during the build process.
if (!global.NBIN_LOADED) {
  try {
    const nbin = require("nbin");
    nbin.shimNativeFs("{{ROOT_PATH}}");
    global.NBIN_LOADED = true;
    const path = require("path");
    const rg = require("vscode-ripgrep");
    rg.binaryRgPath = rg.rgPath;
    rg.rgPath = path.join(
      require("os").tmpdir(),
      `code-server/${path.basename(rg.binaryRgPath)}`
    );
  } catch (error) { /* Not in the binary. */ }
}
@@ -1,23 +0,0 @@
const { Binary } = require("@coder/nbin");
const fs = require("fs");
const path = require("path");

const source = process.argv[2];
const target = process.argv[3];
const binaryName = process.argv[4];

const bin = new Binary({
  mainFile: path.join(source, "out/vs/server/main.js"),
  target: target,
});

bin.writeFiles(path.join(source, "**"));

bin.build().then((binaryData) => {
  const outputPath = path.join(source, binaryName);
  fs.writeFileSync(outputPath, binaryData);
  fs.chmodSync(outputPath, "755");
}).catch((ex) => {
  console.error(ex);
  process.exit(1);
});
71
scripts/optimize.js
Normal file
@@ -0,0 +1,71 @@
// This must be ran from VS Code's root.
const gulp = require("gulp");
const path = require("path");
const _ = require("underscore");
const buildfile = require("./src/buildfile");
const common = require("./build/lib/optimize");
const util = require("./build/lib/util");
const deps = require("./build/dependencies");

const vscodeEntryPoints = _.flatten([
  buildfile.entrypoint("vs/workbench/workbench.web.api"),
  buildfile.entrypoint("vs/server/src/node/cli"),
  buildfile.base,
  buildfile.workbenchWeb,
  buildfile.workerExtensionHost,
  buildfile.keyboardMaps,
  buildfile.entrypoint('vs/platform/files/node/watcher/unix/watcherApp', ["vs/css", "vs/nls"]),
  buildfile.entrypoint('vs/platform/files/node/watcher/nsfw/watcherApp', ["vs/css", "vs/nls"]),
  buildfile.entrypoint('vs/workbench/services/extensions/node/extensionHostProcess', ["vs/css", "vs/nls"]),
]);

const vscodeResources = [
  "out-build/vs/server/main.js",
  "out-build/vs/server/src/node/uriTransformer.js",
  "!out-build/vs/server/doc/**",
  "out-build/vs/server/src/media/*",
  "out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js",
  "out-build/bootstrap.js",
  "out-build/bootstrap-fork.js",
  "out-build/bootstrap-amd.js",
  "out-build/paths.js",
  'out-build/vs/**/*.{svg,png,html}',
  "!out-build/vs/code/browser/workbench/*.html",
  '!out-build/vs/code/electron-browser/**',
  "out-build/vs/base/common/performance.js",
  "out-build/vs/base/node/languagePacks.js",
  "out-build/vs/base/browser/ui/octiconLabel/octicons/**",
  "out-build/vs/base/browser/ui/codiconLabel/codicon/**",
  "out-build/vs/workbench/browser/media/*-theme.css",
  "out-build/vs/workbench/contrib/debug/**/*.json",
  "out-build/vs/workbench/contrib/externalTerminal/**/*.scpt",
  "out-build/vs/workbench/contrib/webview/browser/pre/*.js",
  "out-build/vs/**/markdown.css",
  "out-build/vs/workbench/contrib/tasks/**/*.json",
  "out-build/vs/platform/files/**/*.md",
  "!**/test/**"
];

const rootPath = __dirname;
const nodeModules = ["electron", "original-fs"]
  .concat(_.uniq(deps.getProductionDependencies(rootPath).map((d) => d.name)))
  .concat(_.uniq(deps.getProductionDependencies(path.join(rootPath, "src/vs/server")).map((d) => d.name)))
  .concat(Object.keys(process.binding("natives")).filter((n) => !/^_|\//.test(n)));

gulp.task("optimize", gulp.series(
  util.rimraf("out-vscode"),
  common.optimizeTask({
    src: "out-build",
    entryPoints: vscodeEntryPoints,
    resources: vscodeResources,
    loaderConfig: common.loaderConfig(nodeModules),
    out: "out-vscode",
    inlineAmdImages: true,
    bundleInfo: undefined
  }),
));

gulp.task("minify", gulp.series(
  util.rimraf("out-vscode-min"),
  common.minifyTask("out-vscode")
));
@@ -15,5 +15,7 @@
  "win32ShellNameShort": "C&ode Server",
  "darwinBundleIdentifier": "com.code.server",
  "linuxIconName": "com.code.server",
  "urlProtocol": "code-server"
  "urlProtocol": "code-server",
  "updateUrl": "https://api.github.com/repos/cdr/code-server/releases",
  "quality": "latest"
}
@@ -1,276 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euox pipefail
|
||||
|
||||
function log() {
|
||||
local message="${1}" ; shift
|
||||
local level="${1:-info}"
|
||||
if [[ "${level}" == "error" ]] ; then
|
||||
>&2 echo "${message}"
|
||||
else
|
||||
echo "${message}"
|
||||
fi
|
||||
}
|
||||
|
||||
# Copy code-server into VS Code along with its dependencies.
|
||||
function copy-server() {
|
||||
local serverPath="${sourcePath}/src/vs/server"
|
||||
rm -rf "${serverPath}"
|
||||
mkdir -p "${serverPath}"
|
||||
|
||||
cp -r "${rootPath}/src" "${serverPath}"
|
||||
cp -r "${rootPath}/typings" "${serverPath}"
|
||||
cp "${rootPath}/main.js" "${serverPath}"
|
||||
cp "${rootPath}/package.json" "${serverPath}"
|
||||
cp "${rootPath}/yarn.lock" "${serverPath}"
|
||||
|
||||
if [[ -d "${rootPath}/node_modules" ]] ; then
|
||||
cp -r "${rootPath}/node_modules" "${serverPath}"
|
||||
else
|
||||
# Ignore scripts to avoid also installing VS Code dependencies which has
|
||||
# already been done.
|
||||
cd "${serverPath}" && yarn --ignore-scripts
|
||||
rm -r node_modules/@types/node # I keep getting type conflicts
|
||||
fi
|
||||
|
||||
# TODO: Duplicate identifier issue. There must be a better way to fix this.
|
||||
if [[ "${target}" == "darwin" ]] ; then
|
||||
rm "${serverPath}/node_modules/fsevents/node_modules/safe-buffer/index.d.ts"
|
||||
fi
|
||||
}
|
||||
|
||||
# Prepend the nbin shim which enables finding files within the binary.
|
||||
function prepend-loader() {
|
||||
local filePath="${buildPath}/${1}" ; shift
|
||||
cat "${rootPath}/scripts/nbin-shim.js" "${filePath}" > "${filePath}.temp"
|
||||
mv "${filePath}.temp" "${filePath}"
|
||||
# Using : as the delimiter so the escaping here is easier to read.
|
||||
# ${parameter/pattern/string}, so the pattern is /: (if the pattern starts
|
||||
# with / it matches all instances) and the string is \\: (results in \:).
|
||||
if [[ "${target}" == "darwin" ]] ; then
|
||||
sed -i "" -e "s:{{ROOT_PATH}}:${buildPath//:/\\:}:g" "${filePath}"
|
||||
else
|
||||
sed -i "s:{{ROOT_PATH}}:${buildPath//:/\\:}:g" "${filePath}"
|
||||
fi
|
||||
}
|
||||
|
||||
# Copy code-server into VS Code then build it.
|
||||
function build-code-server() {
|
||||
copy-server
|
||||
cd "${sourcePath}" && yarn gulp compile-build --max-old-space-size=32384
|
||||
|
||||
local min=""
|
||||
if [[ -n "${minify}" ]] ; then
|
||||
min="-min"
|
||||
yarn gulp minify-vscode --max-old-space-size=32384
|
||||
else
|
||||
yarn gulp optimize-vscode --max-old-space-size=32384
|
||||
fi
|
||||
|
||||
rm -rf "${buildPath}"
|
||||
mkdir -p "${buildPath}"
|
||||
|
||||
# Rebuild to make sure native modules work on the target system.
|
||||
cp "${sourcePath}/remote/"{package.json,yarn.lock,.yarnrc} "${buildPath}"
|
||||
cd "${buildPath}" && yarn --production --force --build-from-source
|
||||
rm "${buildPath}/"{package.json,yarn.lock,.yarnrc}
|
||||
|
||||
cp -r "${sourcePath}/.build/extensions" "${buildPath}"
|
||||
cp -r "${sourcePath}/out-vscode${min}" "${buildPath}/out"
|
||||
node "${rootPath}/scripts/build-json.js" "${sourcePath}" "${buildPath}" "${vscodeVersion}" "${codeServerVersion}"
|
||||
|
||||
# Only keep production dependencies for the server.
|
||||
cp "${rootPath}/"{package.json,yarn.lock} "${buildPath}/out/vs/server"
|
||||
cd "${buildPath}/out/vs/server" && yarn --production --ignore-scripts
|
||||
rm "${buildPath}/out/vs/server/"{package.json,yarn.lock}
|
||||
|
||||
# onigasm 2.2.2 has a bug that makes it broken for PHP files so use 2.2.1.
|
||||
# https://github.com/NeekSandhu/onigasm/issues/17
|
||||
local onigasmPath="${buildPath}/node_modules/onigasm-umd"
|
||||
rm -rf "${onigasmPath}"
|
||||
git clone "https://github.com/alexandrudima/onigasm-umd" "${onigasmPath}"
|
||||
cd "${onigasmPath}" && yarn && yarn add --dev onigasm@2.2.1 && yarn package
|
||||
mkdir "${onigasmPath}-temp"
|
||||
mv "${onigasmPath}/"{release,LICENSE} "${onigasmPath}-temp"
|
||||
rm -rf "${onigasmPath}"
|
||||
mv "${onigasmPath}-temp" "${onigasmPath}"
|
||||
|
||||
prepend-loader "out/vs/server/main.js"
|
||||
prepend-loader "out/bootstrap-fork.js"
|
||||
prepend-loader "extensions/node_modules/typescript/lib/tsserver.js"
|
||||
|
||||
log "Final build: ${buildPath}"
|
||||
}
|
||||
|
||||
# Download and extract a tar from a URL with either curl or wget depending on
|
||||
# which is available.
|
||||
function download-tar() {
|
||||
local url="${1}" ; shift
|
||||
if command -v wget &> /dev/null ; then
|
||||
wget "${url}" --quiet -O - | tar -C "${stagingPath}" -xz
|
||||
else
|
||||
curl "${url}" --silent --fail | tar -C "${stagingPath}" -xz
|
||||
fi
|
||||
}
|
||||
|
||||
# Download a pre-built package. If it doesn't exist and we are in the CI, exit.
|
||||
# Otherwise the return will be whether it existed or not. The pre-built package
|
||||
# is provided to reduce CI build time.
|
||||
function download-pre-built() {
|
||||
local archiveName="${1}" ; shift
|
||||
local url="https://codesrv-ci.cdr.sh/${archiveName}"
|
||||
if ! download-tar "${url}" ; then
|
||||
if [[ -n "${ci}" ]] ; then
|
||||
log "${url} does not exist" "error"
|
||||
exit 1
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
# Fully build code-server.
|
||||
function build-task() {
|
||||
mkdir -p "${stagingPath}"
|
||||
if [[ ! -d "${sourcePath}" ]] ; then
|
||||
if ! download-pre-built "vscode-${vscodeVersion}.tar.gz" ; then
|
||||
git clone https://github.com/microsoft/vscode --quiet \
|
||||
--branch "${vscodeVersion}" --single-branch --depth=1 \
|
||||
"${sourcePath}"
|
||||
fi
|
||||
fi
|
||||
cd "${sourcePath}"
|
||||
git reset --hard && git clean -fd
|
||||
git apply "${rootPath}/scripts/vscode.patch"
|
||||
if [[ ! -d "${sourcePath}/node_modules" ]] ; then
|
||||
if [[ -n "${ci}" ]] ; then
|
||||
log "Pre-built VS Code ${vscodeVersion} has no node_modules" "error"
|
||||
exit 1
|
||||
fi
|
||||
yarn
|
||||
fi
|
||||
if [[ ! -d "${sourcePath}/.build/extensions" ]] ; then
|
||||
if [[ -n "${ci}" ]] ; then
|
||||
log "Pre-built VS Code ${vscodeVersion} has no built extensions" "error"
|
||||
exit 1
|
||||
fi
|
||||
yarn gulp compile-extensions-build --max-old-space-size=32384
|
||||
fi
|
||||
build-code-server
|
||||
}
|
||||
|
||||
# Package the binary into a tar or zip for release.
|
||||
function package-task() {
|
||||
local archivePath="${releasePath}/${binaryName}"
|
||||
rm -rf "${archivePath}"
|
||||
mkdir -p "${archivePath}"
|
||||
|
||||
cp "${buildPath}/${binaryName}" "${archivePath}/code-server"
|
||||
cp "${rootPath}/README.md" "${archivePath}"
|
||||
cp "${sourcePath}/LICENSE.txt" "${archivePath}"
|
||||
cp "${sourcePath}/ThirdPartyNotices.txt" "${archivePath}"
|
||||
|
||||
cd "${releasePath}"
|
||||
if [[ "${target}" == "darwin" ]] ; then
|
||||
zip -r "${binaryName}.zip" "${binaryName}"
|
||||
log "Archive: ${archivePath}.zip"
|
||||
else
|
||||
tar -czf "${binaryName}.tar.gz" "${binaryName}"
|
||||
log "Archive: ${archivePath}.tar.gz"
|
||||
fi
|
||||
}
|
||||
|
||||
# Bundle built code into a binary.
|
||||
function binary-task() {
|
||||
cd "${rootPath}"
|
||||
node "${rootPath}/scripts/nbin.js" "${buildPath}" "${target}" "${binaryName}"
|
||||
log "Binary: ${buildPath}/${binaryName}"
|
||||
}
|
||||
|
||||
# Check if it looks like we are inside VS Code.
|
||||
function in-vscode () {
|
||||
local dir="${1}" ; shift
|
||||
local maybeVsCode
|
||||
local dirName
|
||||
maybeVsCode="$(cd "${dir}/../../.." ; pwd -P)"
|
||||
dirName="$(basename "${maybeVsCode}")"
|
||||
if [[ "${dirName}" != "vscode" ]] ; then
|
||||
return 1
|
||||
fi
|
||||
if [[ ! -f "${maybeVsCode}/package.json" ]] ; then
|
||||
return 1
|
||||
fi
|
||||
if ! grep '"name": "code-oss-dev"' "${maybeVsCode}/package.json" -q ; then
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
function main() {
|
||||
local rootPath
|
||||
rootPath="$(cd "$(dirname "${0}")/.." ; pwd -P)"
|
||||
|
||||
local task="${1}" ; shift
|
||||
if [[ "${task}" == "ensure-in-vscode" ]] ; then
|
||||
if ! in-vscode "${rootPath}"; then
|
||||
log "Not in VS Code" "error"
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# This lets you build in a separate directory since building within this
|
||||
# directory while developing makes it hard to keep developing since compiling
|
||||
# will compile everything in the build directory as well.
|
||||
local outPath="${OUT:-${rootPath}}"
|
||||
local releasePath="${outPath}/release"
|
||||
local stagingPath="${outPath}/build"
|
||||
|
||||
# If we're inside a VS Code directory, assume we want to develop. In that case
|
||||
# we should set an OUT directory and not build in this directory.
|
||||
if in-vscode "${outPath}" ; then
|
||||
log "Set the OUT environment variable to something outside of VS Code" "error"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local vscodeVersion="${1}" ; shift
|
||||
local sourceName="vscode-${vscodeVersion}-source"
|
||||
local sourcePath="${stagingPath}/${sourceName}"
|
||||
|
||||
if [[ "${task}" == "package-prebuilt" ]] ; then
|
||||
local archiveName="vscode-${vscodeVersion}.tar.gz"
|
||||
cd "${sourcePath}"
|
||||
git reset --hard && git clean -xfd -e '.build/extensions' -e 'node_modules'
|
||||
cd "${stagingPath}"
|
||||
tar -czf "${archiveName}" "${sourceName}"
|
||||
mkdir -p "${releasePath}" && mv -f "${archiveName}" "${releasePath}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
local codeServerVersion="${1}" ; shift
|
||||
local ci="${CI:-}"
|
||||
local minify="${MINIFY:-}"
|
||||
|
||||
local arch
|
||||
arch=$(uname -m)
|
||||
|
||||
local target="linux"
|
||||
local ostype="${OSTYPE:-}"
|
||||
if [[ "${ostype}" == "darwin"* ]] ; then
|
||||
target="darwin"
|
||||
else
|
||||
# On Alpine there seems no way to get the version except to use an invalid
|
||||
# command which will output the version to stderr and exit with 1.
|
||||
local output
|
||||
output=$(ldd --version 2>&1 || :)
|
||||
if [[ "${output}" == "musl"* ]] ; then
|
||||
target="alpine"
|
||||
fi
|
||||
fi
|
||||
|
||||
local binaryName="code-server${codeServerVersion}-vsc${vscodeVersion}-${target}-${arch}"
|
||||
local buildPath="${stagingPath}/${binaryName}-built"
|
||||
|
||||
"${task}-task" "$@"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
19
scripts/test.sh
Executable file
@@ -0,0 +1,19 @@
#!/usr/bin/env sh
# test.sh -- Simple test for CI.
# We'll have more involved tests eventually. This just ensures the binary has
# been built and runs.

set -eu

main() {
  cd "$(dirname "$0")/.."

  version=$(./binaries/code-server* --version | head -1)
  echo "Got '$version' for the version"
  case $version in
    *-vsc1.41.1) exit 0 ;;
    *) exit 1 ;;
  esac
}

main "$@"
17
scripts/tsconfig.json
Normal file
@@ -0,0 +1,17 @@
{
  "compilerOptions": {
    "module": "commonjs",
    "moduleResolution": "node",
    "noImplicitAny": true,
    "experimentalDecorators": true,
    "noImplicitReturns": true,
    "noUnusedLocals": true,
    "noImplicitThis": true,
    "alwaysStrict": true,
    "strictBindCallApply": true,
    "strictNullChecks": true,
    "forceConsistentCasingInFileNames": true,
    "baseUrl": ".",
    "target": "esnext"
  }
}
1612
scripts/vscode.patch
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
|
||||
import * as vscode from "vscode";
|
||||
import { CoderApi, VSCodeApi } from "../typings/api";
|
||||
import { CoderApi, VSCodeApi } from "../../typings/api";
|
||||
import { createCSSRule } from "vs/base/browser/dom";
|
||||
import { Emitter, Event } from "vs/base/common/event";
|
||||
import { IDisposable } from "vs/base/common/lifecycle";
|
||||
@@ -15,13 +15,13 @@ import { IInstantiationService, ServiceIdentifier } from "vs/platform/instantiat
|
||||
import { ServiceCollection } from "vs/platform/instantiation/common/serviceCollection";
|
||||
import { INotificationService } from "vs/platform/notification/common/notification";
|
||||
import { Registry } from "vs/platform/registry/common/platform";
|
||||
import { IStatusbarEntry, IStatusbarEntryAccessor, IStatusbarService, StatusbarAlignment } from "vs/platform/statusbar/common/statusbar";
|
||||
import { IStatusbarEntry, IStatusbarEntryAccessor, IStatusbarService, StatusbarAlignment } from "vs/workbench/services/statusbar/common/statusbar";
|
||||
import { IStorageService } from "vs/platform/storage/common/storage";
|
||||
import { ITelemetryService } from "vs/platform/telemetry/common/telemetry";
|
||||
import { IThemeService } from "vs/platform/theme/common/themeService";
|
||||
import { IWorkspaceContextService } from "vs/platform/workspace/common/workspace";
|
||||
import * as extHostTypes from "vs/workbench/api/common/extHostTypes";
|
||||
import { CustomTreeView, CustomTreeViewPanel } from "vs/workbench/browser/parts/views/customView";
|
||||
import { CustomTreeView, CustomTreeViewPane } from "vs/workbench/browser/parts/views/customView";
|
||||
import { ViewContainerViewlet } from "vs/workbench/browser/parts/views/viewsViewlet";
|
||||
import { Extensions as ViewletExtensions, ShowViewletAction, ViewletDescriptor, ViewletRegistry } from "vs/workbench/browser/viewlet";
|
||||
import { Extensions as ActionExtensions, IWorkbenchActionRegistry } from "vs/workbench/common/actions";
|
||||
@@ -120,11 +120,11 @@ export const coderApi = (serviceCollection: ServiceCollection): CoderApi => {
|
||||
}
|
||||
|
||||
Registry.as<ViewletRegistry>(ViewletExtensions.Viewlets).registerViewlet(
|
||||
new ViewletDescriptor(CustomViewlet as any, id, containerName, cssClass, undefined, URI.parse(icon)),
|
||||
ViewletDescriptor.create(CustomViewlet as any, id, containerName, cssClass, undefined, URI.parse(icon)),
|
||||
);
|
||||
|
||||
Registry.as<IWorkbenchActionRegistry>(ActionExtensions.WorkbenchActions).registerWorkbenchAction(
|
||||
new SyncActionDescriptor(OpenCustomViewletAction as any, id, localize("showViewlet", "Show {0}", containerName)),
|
||||
SyncActionDescriptor.create(OpenCustomViewletAction as any, id, localize("showViewlet", "Show {0}", containerName)),
|
||||
"View: Show {0}",
|
||||
localize("view", "View"),
|
||||
);
|
||||
@@ -137,7 +137,7 @@ export const coderApi = (serviceCollection: ServiceCollection): CoderApi => {
|
||||
Registry.as<IViewsRegistry>(ViewsExtensions.ViewsRegistry).registerViews([{
|
||||
id: viewId,
|
||||
name: viewName,
|
||||
ctorDescriptor: { ctor: CustomTreeViewPanel },
|
||||
ctorDescriptor: { ctor: CustomTreeViewPane },
|
||||
treeView: getService(IInstantiationService).createInstance(CustomTreeView as any, viewId, container),
|
||||
}] as ITreeViewDescriptor[], container);
|
||||
},
|
||||
@@ -286,8 +286,8 @@ class StatusBarEntry implements vscode.StatusBarItem {
|
||||
|
||||
private _id: number;
|
||||
private entry: IStatusBarEntry;
|
||||
private visible: boolean;
|
||||
private disposed: boolean;
|
||||
private visible?: boolean;
|
||||
private disposed?: boolean;
|
||||
private statusId: string;
|
||||
private statusName: string;
|
||||
private accessor?: IStatusbarEntryAccessor;
|
||||
133
src/browser/client.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
import { Emitter } from "vs/base/common/event";
|
||||
import { URI } from "vs/base/common/uri";
|
||||
import { localize } from "vs/nls";
|
||||
import { Extensions, IConfigurationRegistry } from "vs/platform/configuration/common/configurationRegistry";
|
||||
import { registerSingleton } from "vs/platform/instantiation/common/extensions";
|
||||
import { ServiceCollection } from "vs/platform/instantiation/common/serviceCollection";
|
||||
import { ILocalizationsService } from "vs/platform/localizations/common/localizations";
|
||||
import { INotificationService, Severity } from "vs/platform/notification/common/notification";
|
||||
import { Registry } from "vs/platform/registry/common/platform";
|
||||
import { PersistentConnectionEventType } from "vs/platform/remote/common/remoteAgentConnection";
|
||||
import { ITelemetryService } from "vs/platform/telemetry/common/telemetry";
|
||||
import { coderApi, vscodeApi } from "vs/server/src/browser/api";
|
||||
import { INodeProxyService, NodeProxyChannelClient } from "vs/server/src/common/nodeProxy";
|
||||
import { TelemetryChannelClient } from "vs/server/src/common/telemetry";
|
||||
import { split } from "vs/server/src/common/util";
|
||||
import "vs/workbench/contrib/localizations/browser/localizations.contribution";
|
||||
import { LocalizationsService } from "vs/workbench/services/localizations/electron-browser/localizationsService";
|
||||
import { IRemoteAgentService } from "vs/workbench/services/remote/common/remoteAgentService";
|
||||
|
||||
class TelemetryService extends TelemetryChannelClient {
|
||||
public constructor(
|
||||
@IRemoteAgentService remoteAgentService: IRemoteAgentService,
|
||||
) {
|
||||
super(remoteAgentService.getConnection()!.getChannel("telemetry"));
|
||||
}
|
||||
}
|
||||
|
||||
const TELEMETRY_SECTION_ID = "telemetry";
|
||||
|
||||
Registry.as<IConfigurationRegistry>(Extensions.Configuration).registerConfiguration({
|
||||
"id": TELEMETRY_SECTION_ID,
|
||||
"order": 110,
|
||||
"type": "object",
|
||||
"title": localize("telemetryConfigurationTitle", "Telemetry"),
|
||||
"properties": {
|
||||
"telemetry.enableTelemetry": {
|
||||
"type": "boolean",
|
||||
"description": localize("telemetry.enableTelemetry", "Enable usage data and errors to be sent to a Microsoft online service."),
|
||||
"default": true,
|
||||
"tags": ["usesOnlineServices"]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
class NodeProxyService extends NodeProxyChannelClient implements INodeProxyService {
|
||||
private readonly _onClose = new Emitter<void>();
|
||||
public readonly onClose = this._onClose.event;
|
||||
private readonly _onDown = new Emitter<void>();
|
||||
public readonly onDown = this._onDown.event;
|
||||
private readonly _onUp = new Emitter<void>();
|
||||
public readonly onUp = this._onUp.event;
|
||||
|
||||
public constructor(
|
||||
@IRemoteAgentService remoteAgentService: IRemoteAgentService,
|
||||
) {
|
||||
super(remoteAgentService.getConnection()!.getChannel("nodeProxy"));
|
||||
remoteAgentService.getConnection()!.onDidStateChange((state) => {
|
||||
switch (state.type) {
|
||||
case PersistentConnectionEventType.ConnectionGain:
|
||||
return this._onUp.fire();
|
||||
case PersistentConnectionEventType.ConnectionLost:
|
||||
return this._onDown.fire();
|
||||
case PersistentConnectionEventType.ReconnectionPermanentFailure:
|
||||
return this._onClose.fire();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
registerSingleton(ILocalizationsService, LocalizationsService);
|
||||
registerSingleton(INodeProxyService, NodeProxyService);
|
||||
registerSingleton(ITelemetryService, TelemetryService);
|
||||
|
||||
/**
|
||||
* This is called by vs/workbench/browser/web.main.ts after the workbench has
|
||||
* been initialized so we can initialize our own client-side code.
|
||||
*/
|
||||
export const initialize = async (services: ServiceCollection): Promise<void> => {
|
||||
const target = window as any;
|
||||
target.ide = coderApi(services);
|
||||
target.vscode = vscodeApi(services);
|
||||
|
||||
const event = new CustomEvent("ide-ready");
|
||||
(event as any).ide = target.ide;
|
||||
(event as any).vscode = target.vscode;
|
||||
window.dispatchEvent(event);
|
||||
|
||||
if (!window.isSecureContext) {
|
||||
(services.get(INotificationService) as INotificationService).notify({
|
||||
severity: Severity.Warning,
|
||||
message: "code-server is being accessed over an insecure domain. Some functionality may not work as expected.",
|
||||
actions: {
|
||||
primary: [{
|
||||
id: "understand",
|
||||
label: "I understand",
|
||||
tooltip: "",
|
||||
class: undefined,
|
||||
enabled: true,
|
||||
checked: true,
|
||||
dispose: () => undefined,
|
||||
run: () => {
|
||||
return Promise.resolve();
|
||||
}
|
||||
}],
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export interface Query {
|
||||
[key: string]: string | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the URL modified with the specified query variables. It's pretty
|
||||
* stupid so it probably doesn't cover any edge cases. Undefined values will
|
||||
* unset existing values. Doesn't allow duplicates.
|
||||
*/
|
||||
export const withQuery = (url: string, replace: Query): string => {
|
||||
const uri = URI.parse(url);
|
||||
const query = { ...replace };
|
||||
uri.query.split("&").forEach((kv) => {
|
||||
const [key, value] = split(kv, "=");
|
||||
if (!(key in query)) {
|
||||
query[key] = value;
|
||||
}
|
||||
});
|
||||
return uri.with({
|
||||
query: Object.keys(query)
|
||||
.filter((k) => typeof query[k] !== "undefined")
|
||||
.map((k) => `${k}=${query[k]}`).join("&"),
|
||||
}).toString(true);
|
||||
};
|
||||
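Editor's aside (not from the source): a hedged usage sketch for the withQuery helper defined above. The URL and keys are invented for illustration; per the doc comment, values already in the URL are kept unless overridden, and keys passed as undefined are removed.

// Roughly: the existing "error" value is dropped, "password" is added, "to" is kept.
const example = withQuery("https://example.com/login?to=/dashboard&error=1", {
  error: undefined,
  password: "mysecret",
});
// -> something like "https://example.com/login?password=mysecret&to=/dashboard"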
46
src/browser/extHostNodeProxy.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { Emitter } from "vs/base/common/event";
|
||||
import { createDecorator } from "vs/platform/instantiation/common/instantiation";
|
||||
import { ExtHostNodeProxyShape, MainContext, MainThreadNodeProxyShape } from "vs/workbench/api/common/extHost.protocol";
|
||||
import { IExtHostRpcService } from "vs/workbench/api/common/extHostRpcService";
|
||||
|
||||
export class ExtHostNodeProxy implements ExtHostNodeProxyShape {
|
||||
_serviceBrand: any;
|
||||
|
||||
private readonly _onMessage = new Emitter<string>();
|
||||
public readonly onMessage = this._onMessage.event;
|
||||
private readonly _onClose = new Emitter<void>();
|
||||
public readonly onClose = this._onClose.event;
|
||||
private readonly _onDown = new Emitter<void>();
|
||||
public readonly onDown = this._onDown.event;
|
||||
private readonly _onUp = new Emitter<void>();
|
||||
public readonly onUp = this._onUp.event;
|
||||
|
||||
private readonly proxy: MainThreadNodeProxyShape;
|
||||
|
||||
constructor(@IExtHostRpcService rpc: IExtHostRpcService) {
|
||||
this.proxy = rpc.getProxy(MainContext.MainThreadNodeProxy);
|
||||
}
|
||||
|
||||
public $onMessage(message: string): void {
|
||||
this._onMessage.fire(message);
|
||||
}
|
||||
|
||||
public $onClose(): void {
|
||||
this._onClose.fire();
|
||||
}
|
||||
|
||||
public $onUp(): void {
|
||||
this._onUp.fire();
|
||||
}
|
||||
|
||||
public $onDown(): void {
|
||||
this._onDown.fire();
|
||||
}
|
||||
|
||||
public send(message: string): void {
|
||||
this.proxy.$send(message);
|
||||
}
|
||||
}
|
||||
|
||||
export interface IExtHostNodeProxy extends ExtHostNodeProxy { }
|
||||
export const IExtHostNodeProxy = createDecorator<IExtHostNodeProxy>("IExtHostNodeProxy");
|
||||
@@ -4,8 +4,10 @@
  <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0, user-scalable=no">
  <meta http-equiv="Content-Security-Policy" content="default-src 'none'; style-src 'self' 'unsafe-inline'; script-src 'unsafe-inline'; manifest-src 'self'; img-src 'self';">
  <title>Authenticate: code-server</title>
  <link rel="icon" href="./favicon.ico" type="image/x-icon" />
  <link rel="manifest" href="./manifest.json">
  <link rel="icon" href="./static/out/vs/server/src/media/favicon.ico" type="image/x-icon" />
  <link rel="manifest" href="./manifest.json" crossorigin="use-credentials">
  <link rel="apple-touch-icon" href="./static/out/vs/server/src/media/code-server.png" />
  <meta name="apple-mobile-web-app-capable" content="yes">
  <link href="./static/out/vs/server/src/media/login.css" rel="stylesheet">
</head>
<body>
37
src/browser/mainThreadNodeProxy.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { IDisposable } from "vs/base/common/lifecycle";
|
||||
import { INodeProxyService } from "vs/server/src/common/nodeProxy";
|
||||
import { ExtHostContext, IExtHostContext, MainContext, MainThreadNodeProxyShape } from "vs/workbench/api/common/extHost.protocol";
|
||||
import { extHostNamedCustomer } from "vs/workbench/api/common/extHostCustomers";
|
||||
|
||||
@extHostNamedCustomer(MainContext.MainThreadNodeProxy)
|
||||
export class MainThreadNodeProxy implements MainThreadNodeProxyShape {
|
||||
private disposed = false;
|
||||
private disposables = <IDisposable[]>[];
|
||||
|
||||
constructor(
|
||||
extHostContext: IExtHostContext,
|
||||
@INodeProxyService private readonly proxyService: INodeProxyService,
|
||||
) {
|
||||
if (!extHostContext.remoteAuthority) { // HACK: A terrible way to detect if running in the worker.
|
||||
const proxy = extHostContext.getProxy(ExtHostContext.ExtHostNodeProxy);
|
||||
this.disposables = [
|
||||
this.proxyService.onMessage((message: string) => proxy.$onMessage(message)),
|
||||
this.proxyService.onClose(() => proxy.$onClose()),
|
||||
this.proxyService.onDown(() => proxy.$onDown()),
|
||||
this.proxyService.onUp(() => proxy.$onUp()),
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
$send(message: string): void {
|
||||
if (!this.disposed) {
|
||||
this.proxyService.send(message);
|
||||
}
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
this.disposables.forEach((d) => d.dispose());
|
||||
this.disposables = [];
|
||||
this.disposed = true;
|
||||
}
|
||||
}
|
||||
92
src/browser/workbench-build.html
Normal file
@@ -0,0 +1,92 @@
|
||||
<!-- Copyright (C) Microsoft Corporation. All rights reserved. -->
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
|
||||
<!-- Disable pinch zooming -->
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0, user-scalable=no">
|
||||
|
||||
<!-- Workbench Configuration -->
|
||||
<meta id="vscode-workbench-web-configuration" data-settings="{{WORKBENCH_WEB_CONFIGURATION}}">
|
||||
|
||||
<!-- Workarounds/Hacks (remote user data uri) -->
|
||||
<meta id="vscode-remote-user-data-uri" data-settings="{{REMOTE_USER_DATA_URI}}">
|
||||
<!-- NOTE@coder: Added the commit for use in caching, the product for the
|
||||
extensions gallery URL, and nls for language support. -->
|
||||
<meta id="vscode-remote-commit" data-settings="{{COMMIT}}">
|
||||
<meta id="vscode-remote-product-configuration" data-settings="{{PRODUCT_CONFIGURATION}}">
|
||||
<meta id="vscode-remote-nls-configuration" data-settings="{{NLS_CONFIGURATION}}">
|
||||
|
||||
<!-- Workbench Icon/Manifest/CSS -->
|
||||
<link rel="icon" href="./static-{{COMMIT}}/out/vs/server/src/media/favicon.ico" type="image/x-icon" />
|
||||
<link rel="manifest" href="./manifest.json" crossorigin="use-credentials">
|
||||
<link data-name="vs/workbench/workbench.web.api" rel="stylesheet" href="./static-{{COMMIT}}/out/vs/workbench/workbench.web.api.css">
|
||||
<link rel="apple-touch-icon" href="./static-{{COMMIT}}/out/vs/server/src/media/code-server.png" />
|
||||
<meta name="apple-mobile-web-app-capable" content="yes">
|
||||
|
||||
<!-- Prefetch to avoid waterfall -->
|
||||
<link rel="prefetch" href="./static-{{COMMIT}}/node_modules/semver-umd/lib/semver-umd.js">
|
||||
</head>
|
||||
|
||||
<body aria-label="">
|
||||
</body>
|
||||
|
||||
<!-- Startup (do not modify order of script tags!) -->
|
||||
<!-- NOTE:coder: Modified to work against the current path and use the commit for caching. -->
|
||||
<script>
|
||||
// NOTE: Changes to inline scripts require update of content security policy
|
||||
const basePath = window.location.pathname.replace(/\/+$/, '');
|
||||
const base = window.location.origin + basePath;
|
||||
const el = document.getElementById('vscode-remote-commit');
|
||||
const commit = el ? el.getAttribute('data-settings') : "";
|
||||
const staticBase = base + '/static-' + commit;
|
||||
let nlsConfig;
|
||||
try {
|
||||
nlsConfig = JSON.parse(document.getElementById('vscode-remote-nls-configuration').getAttribute('data-settings'));
|
||||
if (nlsConfig._resolvedLanguagePackCoreLocation) {
|
||||
const bundles = Object.create(null);
|
||||
nlsConfig.loadBundle = (bundle, language, cb) => {
|
||||
let result = bundles[bundle];
|
||||
if (result) {
|
||||
return cb(undefined, result);
|
||||
}
|
||||
// FIXME: Only works if path separators are /.
|
||||
const path = nlsConfig._resolvedLanguagePackCoreLocation
|
||||
+ '/' + bundle.replace(/\//g, '!') + '.nls.json';
|
||||
fetch(`${base}/resource/?path=${encodeURIComponent(path)}`)
|
||||
.then((response) => response.json())
|
||||
.then((json) => {
|
||||
bundles[bundle] = json;
|
||||
cb(undefined, json);
|
||||
})
|
||||
.catch(cb);
|
||||
};
|
||||
}
|
||||
} catch (error) { /* Probably fine. */ }
|
||||
self.require = {
|
||||
baseUrl: `${staticBase}/out`,
|
||||
paths: {
|
||||
'vscode-textmate': `${staticBase}/node_modules/vscode-textmate/release/main`,
|
||||
'onigasm-umd': `${staticBase}/node_modules/onigasm-umd/release/main`,
|
||||
'xterm': `${staticBase}/node_modules/xterm/lib/xterm.js`,
|
||||
'xterm-addon-search': `${staticBase}/node_modules/xterm-addon-search/lib/xterm-addon-search.js`,
|
||||
'xterm-addon-web-links': `${staticBase}/node_modules/xterm-addon-web-links/lib/xterm-addon-web-links.js`,
|
||||
'xterm-addon-webgl': `${staticBase}/node_modules/xterm-addon-webgl/lib/xterm-addon-webgl.js`,
|
||||
'semver-umd': `${staticBase}/node_modules/semver-umd/lib/semver-umd.js`,
|
||||
},
|
||||
'vs/nls': nlsConfig,
|
||||
};
|
||||
</script>
|
||||
<script src="./static-{{COMMIT}}/out/vs/loader.js"></script>
|
||||
<script src="./static-{{COMMIT}}/out/vs/workbench/workbench.web.api.nls.js"></script>
|
||||
<script src="./static-{{COMMIT}}/out/vs/workbench/workbench.web.api.js"></script>
|
||||
<!-- TODO@coder: This errors with multiple anonymous define calls (one is
|
||||
workbench.js and one is semver-umd.js). For now use the same method found in
|
||||
workbench-dev.html. Appears related to the timing of the script load events. -->
|
||||
<!-- <script src="./static-{{COMMIT}}/out/vs/workbench/workbench.js"></script> -->
|
||||
<script>
|
||||
// NOTE: Changes to inline scripts require update of content security policy
|
||||
require(['vs/code/browser/workbench/workbench'], function() {});
|
||||
</script>
|
||||
</html>
|
||||
53
src/browser/workbench.html
Normal file
@@ -0,0 +1,53 @@
|
||||
<!-- Copyright (C) Microsoft Corporation. All rights reserved. -->
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
|
||||
<!-- Disable pinch zooming -->
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0, user-scalable=no">
|
||||
|
||||
<!-- Workbench Configuration -->
|
||||
<meta id="vscode-workbench-web-configuration" data-settings="{{WORKBENCH_WEB_CONFIGURATION}}">
|
||||
|
||||
<!-- Workarounds/Hacks (remote user data uri) -->
|
||||
<meta id="vscode-remote-user-data-uri" data-settings="{{REMOTE_USER_DATA_URI}}">
|
||||
<!-- NOTE@coder: Added the commit for use in caching, the product for the
|
||||
extensions gallery URL, and nls for language support. -->
|
||||
<meta id="vscode-remote-commit" data-settings="{{COMMIT}}">
|
||||
<meta id="vscode-remote-product-configuration" data-settings="{{PRODUCT_CONFIGURATION}}">
|
||||
<meta id="vscode-remote-nls-configuration" data-settings="{{NLS_CONFIGURATION}}">
|
||||
|
||||
<!-- Workbench Icon/Manifest/CSS -->
|
||||
<link rel="icon" href="./static/out/vs/server/src/media/favicon.ico" type="image/x-icon" />
|
||||
<link rel="manifest" href="./manifest.json" crossorigin="use-credentials">
|
||||
</head>
|
||||
|
||||
<body aria-label="">
|
||||
</body>
|
||||
|
||||
<!-- Startup (do not modify order of script tags!) -->
|
||||
<script>
|
||||
const basePath = window.location.pathname.replace(/\/+$/, '');
|
||||
const base = window.location.origin + basePath;
|
||||
const el = document.getElementById('vscode-remote-commit');
|
||||
const commit = el ? el.getAttribute('data-settings') : "";
|
||||
const staticBase = base + '/static-' + commit;
|
||||
self.require = {
|
||||
baseUrl: `${staticBase}/out`,
|
||||
paths: {
|
||||
'vscode-textmate': `${staticBase}/node_modules/vscode-textmate/release/main`,
|
||||
'onigasm-umd': `${staticBase}/node_modules/onigasm-umd/release/main`,
|
||||
'xterm': `${staticBase}/node_modules/xterm/lib/xterm.js`,
|
||||
'xterm-addon-search': `${staticBase}/node_modules/xterm-addon-search/lib/xterm-addon-search.js`,
|
||||
'xterm-addon-web-links': `${staticBase}/node_modules/xterm-addon-web-links/lib/xterm-addon-web-links.js`,
|
||||
'xterm-addon-webgl': `${staticBase}/node_modules/xterm-addon-webgl/lib/xterm-addon-webgl.js`,
|
||||
'semver-umd': `${staticBase}/node_modules/semver-umd/lib/semver-umd.js`,
|
||||
},
|
||||
};
|
||||
</script>
|
||||
<script src="./static/out/vs/loader.js"></script>
|
||||
<script>
|
||||
require(['vs/code/browser/workbench/workbench'], function() {});
|
||||
</script>
|
||||
</html>
|
||||
57
src/browser/worker.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { URI } from "vs/base/common/uri";
|
||||
import { IExtensionDescription } from "vs/platform/extensions/common/extensions";
|
||||
import { ILogService } from "vs/platform/log/common/log";
|
||||
import { Client } from "vs/server/node_modules/@coder/node-browser/out/client/client";
|
||||
import { fromTar } from "vs/server/node_modules/@coder/requirefs/out/requirefs";
|
||||
import { ExtensionActivationTimesBuilder } from "vs/workbench/api/common/extHostExtensionActivator";
|
||||
import { IExtHostNodeProxy } from "./extHostNodeProxy";
|
||||
|
||||
export const loadCommonJSModule = async <T>(
|
||||
module: IExtensionDescription,
|
||||
activationTimesBuilder: ExtensionActivationTimesBuilder,
|
||||
nodeProxy: IExtHostNodeProxy,
|
||||
logService: ILogService,
|
||||
vscode: any,
|
||||
): Promise<T> => {
|
||||
const fetchUri = URI.from({
|
||||
scheme: self.location.protocol.replace(":", ""),
|
||||
authority: self.location.host,
|
||||
path: `${self.location.pathname.replace(/\/static.*\/out\/vs\/workbench\/services\/extensions\/worker\/extensionHostWorkerMain.js$/, "")}/tar`,
|
||||
query: `path=${encodeURIComponent(module.extensionLocation.path)}`,
|
||||
});
|
||||
const response = await fetch(fetchUri.toString(true));
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Failed to download extension "${module.extensionLocation.path}"`);
|
||||
}
|
||||
const client = new Client(nodeProxy, { logger: logService });
|
||||
const init = await client.handshake();
|
||||
const buffer = new Uint8Array(await response.arrayBuffer());
|
||||
const rfs = fromTar(buffer);
|
||||
(<any>self).global = self;
|
||||
rfs.provide("vscode", vscode);
|
||||
Object.keys(client.modules).forEach((key) => {
|
||||
const mod = (client.modules as any)[key];
|
||||
if (key === "process") {
|
||||
(<any>self).process = mod;
|
||||
(<any>self).process.env = init.env;
|
||||
return;
|
||||
}
|
||||
|
||||
rfs.provide(key, mod);
|
||||
switch (key) {
|
||||
case "buffer":
|
||||
(<any>self).Buffer = mod.Buffer;
|
||||
break;
|
||||
case "timers":
|
||||
(<any>self).setImmediate = mod.setImmediate;
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
activationTimesBuilder.codeLoadingStart();
|
||||
return rfs.require(".");
|
||||
} finally {
|
||||
activationTimesBuilder.codeLoadingStop();
|
||||
}
|
||||
};
|
||||
@@ -1,69 +0,0 @@
|
||||
import { URI } from "vs/base/common/uri";
|
||||
import { registerSingleton } from "vs/platform/instantiation/common/extensions";
|
||||
import { ServiceCollection } from "vs/platform/instantiation/common/serviceCollection";
|
||||
import { ITelemetryService } from "vs/platform/telemetry/common/telemetry";
|
||||
import { ILocalizationsService } from "vs/platform/localizations/common/localizations";
|
||||
import { LocalizationsService } from "vs/platform/localizations/electron-browser/localizationsService";
|
||||
import { IUpdateService } from "vs/platform/update/common/update";
|
||||
import { UpdateService } from "vs/platform/update/electron-browser/updateService";
|
||||
import { TelemetryChannelClient } from "vs/server/src/telemetry";
|
||||
import { IUploadService, UploadService } from 'vs/server/src/upload';
|
||||
import { IRemoteAgentService } from "vs/workbench/services/remote/common/remoteAgentService";
|
||||
|
||||
class TelemetryService extends TelemetryChannelClient {
|
||||
public constructor(
|
||||
@IRemoteAgentService remoteAgentService: IRemoteAgentService,
|
||||
) {
|
||||
super(remoteAgentService.getConnection()!.getChannel("telemetry"));
|
||||
}
|
||||
}
|
||||
|
||||
registerSingleton(ILocalizationsService, LocalizationsService);
|
||||
registerSingleton(ITelemetryService, TelemetryService);
|
||||
registerSingleton(IUpdateService, UpdateService);
|
||||
registerSingleton(IUploadService, UploadService, true);
|
||||
|
||||
import "vs/workbench/contrib/update/electron-browser/update.contribution";
|
||||
import 'vs/workbench/contrib/localizations/browser/localizations.contribution';
|
||||
|
||||
import { coderApi, vscodeApi } from "vs/server/src/api";
|
||||
|
||||
/**
|
||||
* This is called by vs/workbench/browser/web.main.ts after the workbench has
|
||||
* been initialized so we can initialize our own client-side code.
|
||||
*/
|
||||
export const initialize = async (services: ServiceCollection): Promise<void> => {
|
||||
const target = window as any;
|
||||
target.ide = coderApi(services);
|
||||
target.vscode = vscodeApi(services);
|
||||
|
||||
const event = new CustomEvent("ide-ready");
|
||||
(event as any).ide = target.ide;
|
||||
(event as any).vscode = target.vscode;
|
||||
window.dispatchEvent(event);
|
||||
};
|
||||
|
||||
export interface Query {
|
||||
[key: string]: string | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the URL modified with the specified query variables. It's pretty
|
||||
* stupid so it probably doesn't cover any edge cases. Undefined values will
|
||||
* unset existing values. Doesn't allow duplicates.
|
||||
*/
|
||||
export const withQuery = (url: string, replace: Query): string => {
|
||||
const uri = URI.parse(url);
|
||||
const query = { ...replace };
|
||||
uri.query.split("&").forEach((kv) => {
|
||||
const [key, value] = kv.split("=", 2);
|
||||
if (!(key in query)) {
|
||||
query[key] = value;
|
||||
}
|
||||
});
|
||||
return uri.with({
|
||||
query: Object.keys(query)
|
||||
.filter((k) => typeof query[k] !== "undefined")
|
||||
.map((k) => `${k}=${query[k]}`).join("&"),
|
||||
}).toString(true);
|
||||
};
|
||||
47
src/common/nodeProxy.ts
Normal file
@@ -0,0 +1,47 @@
import { Event } from "vs/base/common/event";
import { IChannel, IServerChannel } from "vs/base/parts/ipc/common/ipc";
import { createDecorator } from "vs/platform/instantiation/common/instantiation";
import { ReadWriteConnection } from "vs/server/node_modules/@coder/node-browser/out/common/connection";

export const INodeProxyService = createDecorator<INodeProxyService>("nodeProxyService");

export interface INodeProxyService extends ReadWriteConnection {
  _serviceBrand: any;
  send(message: string): void;
  onMessage: Event<string>;
  onUp: Event<void>;
  onClose: Event<void>;
  onDown: Event<void>;
}

export class NodeProxyChannel implements IServerChannel {
  constructor(private service: INodeProxyService) {}

  listen(_: unknown, event: string): Event<any> {
    switch (event) {
      case "onMessage": return this.service.onMessage;
    }
    throw new Error(`Invalid listen ${event}`);
  }

  async call(_: unknown, command: string, args?: any): Promise<any> {
    switch (command) {
      case "send": return this.service.send(args[0]);
    }
    throw new Error(`Invalid call ${command}`);
  }
}

export class NodeProxyChannelClient {
  _serviceBrand: any;

  public readonly onMessage: Event<string>;

  constructor(private readonly channel: IChannel) {
    this.onMessage = this.channel.listen<string>("onMessage");
  }

  public send(data: string): void {
    this.channel.call("send", [data]);
  }
}
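Editor's aside (not from the source): a hedged sketch of how the two halves above fit together over VS Code's channel IPC. The client half mirrors what src/browser/client.ts does, requesting the "nodeProxy" channel from the remote agent connection; the server half would expose an INodeProxyService under that same name. The server parameter below is a structural placeholder, not a specific code-server API.

// Client side (as in src/browser/client.ts): wrap the named channel.
//   new NodeProxyChannelClient(remoteAgentService.getConnection()!.getChannel("nodeProxy"));
// Server side (sketch): register the service-backed channel under the same name.
function registerNodeProxy(
  server: { registerChannel(name: string, channel: NodeProxyChannel): void }, // assumed shape
  service: INodeProxyService,
): void {
  server.registerChannel("nodeProxy", new NodeProxyChannel(service));
}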
10
src/common/util.ts
Normal file
@@ -0,0 +1,10 @@
/**
 * Split a string up to the delimiter. If the delimiter doesn't exist the first
 * item will have all the text and the second item will be an empty string.
 */
export const split = (str: string, delimiter: string): [string, string] => {
  const index = str.indexOf(delimiter);
  return index !== -1
    ? [str.substring(0, index).trim(), str.substring(index + 1)]
    : [str, ""];
};
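Editor's aside (not from the source): a brief usage note for split, with the results worked out from the implementation above; the literal strings are only examples.

split("key=value=with=equals", "=");  // -> ["key", "value=with=equals"]
split("no-delimiter-here", "=");      // -> ["no-delimiter-here", ""]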
@@ -1,57 +0,0 @@
|
||||
import * as appInsights from "applicationinsights";
|
||||
import * as https from "https";
|
||||
import * as http from "http";
|
||||
import * as os from "os";
|
||||
|
||||
export class TelemetryClient implements appInsights.TelemetryClient {
|
||||
public config: any = {};
|
||||
|
||||
public channel = {
|
||||
setUseDiskRetryCaching: (): void => undefined,
|
||||
};
|
||||
|
||||
public trackEvent(options: appInsights.EventTelemetry): void {
|
||||
if (!options.properties) {
|
||||
options.properties = {};
|
||||
}
|
||||
if (!options.measurements) {
|
||||
options.measurements = {};
|
||||
}
|
||||
|
||||
try {
|
||||
const cpus = os.cpus();
|
||||
options.measurements.cores = cpus.length;
|
||||
options.properties["common.cpuModel"] = cpus[0].model;
|
||||
} catch (error) {}
|
||||
|
||||
try {
|
||||
options.measurements.memoryFree = os.freemem();
|
||||
options.measurements.memoryTotal = os.totalmem();
|
||||
} catch (error) {}
|
||||
|
||||
try {
|
||||
options.properties["common.shell"] = os.userInfo().shell;
|
||||
options.properties["common.release"] = os.release();
|
||||
options.properties["common.arch"] = os.arch();
|
||||
} catch (error) {}
|
||||
|
||||
try {
|
||||
const url = process.env.TELEMETRY_URL || "https://v1.telemetry.coder.com/track";
|
||||
const request = (/^http:/.test(url) ? http : https).request(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
request.on("error", () => { /* We don't care. */ });
|
||||
request.write(JSON.stringify(options));
|
||||
request.end();
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
public flush(options: appInsights.FlushOptions): void {
|
||||
if (options.callback) {
|
||||
options.callback("");
|
||||
}
|
||||
}
|
||||
}
|
||||
BIN
src/media/code-server.png
Normal file
Binary file not shown. (After: 39 KiB)
@@ -6,8 +6,8 @@
  "background-color": "#fff",
  "description": "Run VS Code on a remote server.",
  "icons": [{
    "src": "static/code-server.png",
    "sizes": "128x128",
    "src": "./code-server.png",
    "sizes": "384x384",
    "type": "image/png"
  }]
}
@@ -1,23 +1,26 @@
|
||||
import * as path from "path";
|
||||
import { VSBuffer } from "vs/base/common/buffer";
|
||||
import { VSBuffer, VSBufferReadableStream } from "vs/base/common/buffer";
|
||||
import { Emitter, Event } from "vs/base/common/event";
|
||||
import { IDisposable } from "vs/base/common/lifecycle";
|
||||
import { OS } from "vs/base/common/platform";
|
||||
import { ReadableStreamEventPayload } from "vs/base/common/stream";
|
||||
import { URI, UriComponents } from "vs/base/common/uri";
|
||||
import { transformOutgoingURIs } from "vs/base/common/uriIpc";
|
||||
import { IServerChannel } from "vs/base/parts/ipc/common/ipc";
|
||||
import { IDiagnosticInfo } from "vs/platform/diagnostics/common/diagnostics";
|
||||
import { IEnvironmentService } from "vs/platform/environment/common/environment";
|
||||
import { ExtensionIdentifier, IExtensionDescription } from "vs/platform/extensions/common/extensions";
|
||||
import { FileDeleteOptions, FileOpenOptions, FileOverwriteOptions, FileType, IStat, IWatchOptions } from "vs/platform/files/common/files";
|
||||
import { FileDeleteOptions, FileOpenOptions, FileOverwriteOptions, FileReadStreamOptions, FileType, FileWriteOptions, IStat, IWatchOptions } from "vs/platform/files/common/files";
|
||||
import { createReadStream } from "vs/platform/files/common/io";
|
||||
import { DiskFileSystemProvider } from "vs/platform/files/node/diskFileSystemProvider";
|
||||
import { ILogService } from "vs/platform/log/common/log";
|
||||
import pkg from "vs/platform/product/node/package";
|
||||
import product from "vs/platform/product/node/product";
|
||||
import { IRemoteAgentEnvironment } from "vs/platform/remote/common/remoteAgentEnvironment";
|
||||
import product from "vs/platform/product/common/product";
|
||||
import { IRemoteAgentEnvironment, RemoteAgentConnectionContext } from "vs/platform/remote/common/remoteAgentEnvironment";
|
||||
import { ITelemetryService } from "vs/platform/telemetry/common/telemetry";
|
||||
import { getTranslations } from "vs/server/src/nls";
|
||||
import { getUriTransformer } from "vs/server/src/util";
|
||||
import { INodeProxyService } from "vs/server/src/common/nodeProxy";
|
||||
import { getTranslations } from "vs/server/src/node/nls";
|
||||
import { getUriTransformer, localRequire } from "vs/server/src/node/util";
|
||||
import { IFileChangeDto } from "vs/workbench/api/common/extHost.protocol";
|
||||
import { ExtensionScanner, ExtensionScannerInput } from "vs/workbench/services/extensions/node/extensionPoints";
|
||||
|
||||
/**
|
||||
@@ -42,7 +45,7 @@ class Watcher extends DiskFileSystemProvider {
|
||||
}
|
||||
}
|
||||
|
||||
export class FileProviderChannel implements IServerChannel, IDisposable {
|
||||
export class FileProviderChannel implements IServerChannel<RemoteAgentConnectionContext>, IDisposable {
|
||||
private readonly provider: DiskFileSystemProvider;
|
||||
private readonly watchers = new Map<string, Watcher>();
|
||||
|
||||
@@ -53,48 +56,67 @@ export class FileProviderChannel implements IServerChannel, IDisposable {
|
||||
this.provider = new DiskFileSystemProvider(this.logService);
|
||||
}
|
||||
|
||||
public listen(context: any, event: string, args?: any): Event<any> {
|
||||
public listen(context: RemoteAgentConnectionContext, event: string, args?: any): Event<any> {
|
||||
switch (event) {
|
||||
// This is where the actual file changes are sent. The watch method just
|
||||
// adds things that will fire here. That means we have to split up
|
||||
// watchers based on the session otherwise sessions would get events for
|
||||
// other sessions. There is also no point in having the watcher unless
|
||||
// something is listening. I'm not sure there is a different way to
|
||||
// dispose, anyway.
|
||||
case "filechange":
|
||||
const session = args[0];
|
||||
const emitter = new Emitter({
|
||||
onFirstListenerAdd: () => {
|
||||
const provider = new Watcher(this.logService);
|
||||
this.watchers.set(session, provider);
|
||||
const transformer = getUriTransformer(context.remoteAuthority);
|
||||
provider.onDidChangeFile((events) => {
|
||||
emitter.fire(events.map((event) => ({
|
||||
...event,
|
||||
resource: transformer.transformOutgoing(event.resource),
|
||||
})));
|
||||
});
|
||||
provider.onDidErrorOccur((event) => emitter.fire(event));
|
||||
},
|
||||
onLastListenerRemove: () => {
|
||||
this.watchers.get(session)!.dispose();
|
||||
this.watchers.delete(session);
|
||||
},
|
||||
});
|
||||
|
||||
return emitter.event;
|
||||
case "filechange": return this.filechange(context, args[0]);
|
||||
case "readFileStream": return this.readFileStream(args[0], args[1]);
|
||||
}
|
||||
|
||||
throw new Error(`Invalid listen "${event}"`);
|
||||
}
|
||||
|
||||
private filechange(context: RemoteAgentConnectionContext, session: string): Event<IFileChangeDto[]> {
|
||||
const emitter = new Emitter<IFileChangeDto[]>({
|
||||
onFirstListenerAdd: () => {
|
||||
const provider = new Watcher(this.logService);
|
||||
this.watchers.set(session, provider);
|
||||
const transformer = getUriTransformer(context.remoteAuthority);
|
||||
provider.onDidChangeFile((events) => {
|
||||
emitter.fire(events.map((event) => ({
|
||||
...event,
|
||||
resource: transformer.transformOutgoing(event.resource),
|
||||
})));
|
||||
});
|
||||
provider.onDidErrorOccur((event) => this.logService.error(event));
|
||||
},
|
||||
onLastListenerRemove: () => {
|
||||
this.watchers.get(session)!.dispose();
|
||||
this.watchers.delete(session);
|
||||
},
|
||||
});
|
||||
|
||||
return emitter.event;
|
||||
}
|
||||
|
||||
private readFileStream(resource: UriComponents, opts: FileReadStreamOptions): Event<ReadableStreamEventPayload<VSBuffer>> {
|
||||
let fileStream: VSBufferReadableStream | undefined;
|
||||
const emitter = new Emitter<ReadableStreamEventPayload<VSBuffer>>({
|
||||
onFirstListenerAdd: () => {
|
||||
if (!fileStream) {
|
||||
fileStream = createReadStream(this.provider, this.transform(resource), {
|
||||
...opts,
|
||||
bufferSize: 64 * 1024, // From DiskFileSystemProvider
|
||||
});
|
||||
fileStream.on("data", (data) => emitter.fire(data));
|
||||
fileStream.on("error", (error) => emitter.fire(error));
|
||||
fileStream.on("end", () => emitter.fire("end"));
|
||||
}
|
||||
},
|
||||
onLastListenerRemove: () => fileStream && fileStream.destroy(),
|
||||
});
|
||||
|
||||
return emitter.event;
|
||||
}
|
||||
|
||||
public call(_: unknown, command: string, args?: any): Promise<any> {
|
||||
switch (command) {
|
||||
case "stat": return this.stat(args[0]);
|
||||
case "open": return this.open(args[0], args[1]);
|
||||
case "close": return this.close(args[0]);
|
||||
case "read": return this.read(args[0], args[1], args[2]);
|
||||
case "readFile": return this.readFile(args[0]);
|
||||
case "write": return this.write(args[0], args[1], args[2], args[3], args[4]);
|
||||
case "writeFile": return this.writeFile(args[0], args[1], args[2]);
|
||||
case "delete": return this.delete(args[0], args[1]);
|
||||
case "mkdir": return this.mkdir(args[0]);
|
||||
case "readdir": return this.readdir(args[0]);
|
||||
@@ -130,10 +152,18 @@ export class FileProviderChannel implements IServerChannel, IDisposable {
|
||||
return [buffer, bytesRead];
|
||||
}
|
||||
|
||||
private async readFile(resource: UriComponents): Promise<VSBuffer> {
|
||||
return VSBuffer.wrap(await this.provider.readFile(this.transform(resource)));
|
||||
}
|
||||
|
||||
private write(fd: number, pos: number, buffer: VSBuffer, offset: number, length: number): Promise<number> {
|
||||
return this.provider.write(fd, pos, buffer.buffer, offset, length);
|
||||
}
|
||||
|
||||
private writeFile(resource: UriComponents, buffer: VSBuffer, opts: FileWriteOptions): Promise<void> {
|
||||
return this.provider.writeFile(this.transform(resource), buffer.buffer, opts);
|
||||
}
|
||||
|
||||
private async delete(resource: UriComponents, opts: FileDeleteOptions): Promise<void> {
|
||||
return this.provider.delete(this.transform(resource), opts);
|
||||
}
|
||||
@@ -164,8 +194,8 @@ export class FileProviderChannel implements IServerChannel, IDisposable {
|
||||
|
||||
private transform(resource: UriComponents): URI {
|
||||
// Used for walkthrough content.
|
||||
if (resource.path.indexOf("/static") === 0) {
|
||||
return URI.file(this.environmentService.appRoot + resource.path.replace(/^\/static/, ""));
|
||||
if (/^\/static[^/]*\//.test(resource.path)) {
|
||||
return URI.file(this.environmentService.appRoot + resource.path.replace(/^\/static[^/]*\//, "/"));
|
||||
// Used by the webview service worker to load resources.
|
||||
} else if (resource.path === "/vscode-resource" && resource.query) {
|
||||
try {
|
||||
@@ -227,7 +257,7 @@ export class ExtensionEnvironmentChannel implements IServerChannel {
|
||||
const scanMultiple = (isBuiltin: boolean, isUnderDevelopment: boolean, paths: string[]): Promise<IExtensionDescription[][]> => {
|
||||
return Promise.all(paths.map((path) => {
|
||||
return ExtensionScanner.scanExtensions(new ExtensionScannerInput(
|
||||
pkg.version,
|
||||
product.version,
|
||||
product.commit,
|
||||
locale,
|
||||
!!process.env.VSCODE_DEV,
|
||||
@@ -274,3 +304,40 @@ export class ExtensionEnvironmentChannel implements IServerChannel {
|
||||
this.telemetry.setEnabled(false);
|
||||
}
|
||||
}
|
||||
|
||||
export class NodeProxyService implements INodeProxyService {
|
||||
public _serviceBrand = undefined;
|
||||
|
||||
public readonly server: import("@coder/node-browser/out/server/server").Server;
|
||||
|
||||
private readonly _onMessage = new Emitter<string>();
|
||||
public readonly onMessage = this._onMessage.event;
|
||||
private readonly _$onMessage = new Emitter<string>();
|
||||
public readonly $onMessage = this._$onMessage.event;
|
||||
public readonly _onDown = new Emitter<void>();
|
||||
public readonly onDown = this._onDown.event;
|
||||
public readonly _onUp = new Emitter<void>();
|
||||
public readonly onUp = this._onUp.event;
|
||||
|
||||
// Unused because the server connection will never permanently close.
|
||||
private readonly _onClose = new Emitter<void>();
|
||||
public readonly onClose = this._onClose.event;
|
||||
|
||||
public constructor() {
|
||||
// TODO: down/up
|
||||
const { Server } = localRequire<typeof import("@coder/node-browser/out/server/server")>("@coder/node-browser/out/server/server");
|
||||
this.server = new Server({
|
||||
onMessage: this.$onMessage,
|
||||
onClose: this.onClose,
|
||||
onDown: this.onDown,
|
||||
onUp: this.onUp,
|
||||
send: (message: string): void => {
|
||||
this._onMessage.fire(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public send(message: string): void {
|
||||
this._$onMessage.fire(message);
|
||||
}
|
||||
}
|
||||
@@ -5,14 +5,13 @@ import { setUnexpectedErrorHandler } from "vs/base/common/errors";
|
||||
import { main as vsCli } from "vs/code/node/cliProcessMain";
|
||||
import { validatePaths } from "vs/code/node/paths";
|
||||
import { ParsedArgs } from "vs/platform/environment/common/environment";
|
||||
import { buildHelpMessage, buildVersionMessage, Option as VsOption, options as vsOptions } from "vs/platform/environment/node/argv";
|
||||
import { buildHelpMessage, buildVersionMessage, Option as VsOption, OPTIONS, OptionDescriptions } from "vs/platform/environment/node/argv";
|
||||
import { parseMainProcessArgv } from "vs/platform/environment/node/argvHelper";
|
||||
import pkg from "vs/platform/product/node/package";
|
||||
import product from "vs/platform/product/node/product";
|
||||
import { ipcMain } from "vs/server/src/ipc";
|
||||
import { enableCustomMarketplace } from "vs/server/src/marketplace";
|
||||
import { MainServer } from "vs/server/src/server";
|
||||
import { AuthType, buildAllowedMessage, enumToArray, FormatType, generateCertificate, generatePassword, localRequire, open, unpackExecutables } from "vs/server/src/util";
|
||||
import product from "vs/platform/product/common/product";
|
||||
import { ipcMain } from "vs/server/src/node/ipc";
|
||||
import { enableCustomMarketplace } from "vs/server/src/node/marketplace";
|
||||
import { MainServer } from "vs/server/src/node/server";
|
||||
import { AuthType, buildAllowedMessage, enumToArray, FormatType, generateCertificate, generatePassword, localRequire, open, unpackExecutables } from "vs/server/src/node/util";
|
||||
|
||||
const { logger } = localRequire<typeof import("@coder/logger/out/index")>("@coder/logger/out/index");
|
||||
setUnexpectedErrorHandler((error) => logger.warn(error.message));
|
||||
@@ -24,7 +23,7 @@ interface Args extends ParsedArgs {
|
||||
"cert-key"?: string;
|
||||
format?: string;
|
||||
host?: string;
|
||||
open?: string;
|
||||
open?: boolean;
|
||||
port?: string;
|
||||
socket?: string;
|
||||
}
|
||||
@@ -35,14 +34,9 @@ interface Option extends VsOption {
|
||||
}
|
||||
|
||||
const getArgs = (): Args => {
|
||||
const options = vsOptions as Option[];
|
||||
// The last item is _ which is like -- so our options need to come before it.
|
||||
const last = options.pop()!;
|
||||
|
||||
// Remove options that won't work or don't make sense.
|
||||
let i = options.length;
|
||||
while (i--) {
|
||||
switch (options[i].id) {
|
||||
for (let key in OPTIONS) {
|
||||
switch (key) {
|
||||
case "add":
|
||||
case "diff":
|
||||
case "file-uri":
|
||||
@@ -53,28 +47,24 @@ const getArgs = (): Args => {
|
||||
case "wait":
|
||||
case "disable-gpu":
|
||||
// TODO: pretty sure these don't work but not 100%.
|
||||
case "max-memory":
|
||||
case "prof-startup":
|
||||
case "inspect-extensions":
|
||||
case "inspect-brk-extensions":
|
||||
options.splice(i, 1);
|
||||
delete OPTIONS[key];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
options.push({ id: "base-path", type: "string", cat: "o", description: "Base path of the URL at which code-server is hosted (used for login redirects)." });
|
||||
options.push({ id: "cert", type: "string", cat: "o", description: "Path to certificate. If the path is omitted, both this and --cert-key will be generated." });
|
||||
options.push({ id: "cert-key", type: "string", cat: "o", description: "Path to the certificate's key if one was provided." });
|
||||
options.push({ id: "extra-builtin-extensions-dir", type: "string", cat: "o", description: "Path to an extra builtin extension directory." });
|
||||
options.push({ id: "extra-extensions-dir", type: "string", cat: "o", description: "Path to an extra user extension directory." });
|
||||
options.push({ id: "format", type: "string", cat: "o", description: `Format for the version. ${buildAllowedMessage(FormatType)}.` });
|
||||
options.push({ id: "host", type: "string", cat: "o", description: "Host for the server." });
|
||||
options.push({ id: "auth", type: "string", cat: "o", description: `The type of authentication to use. ${buildAllowedMessage(AuthType)}.` });
|
||||
options.push({ id: "open", type: "boolean", cat: "o", description: "Open in the browser on startup." });
|
||||
options.push({ id: "port", type: "string", cat: "o", description: "Port for the main server." });
|
||||
options.push({ id: "socket", type: "string", cat: "o", description: "Listen on a socket instead of host:port." });
|
||||
|
||||
options.push(last);
|
||||
const options = OPTIONS as OptionDescriptions<Required<Args>>;
|
||||
options["base-path"] = { type: "string", cat: "o", description: "Base path of the URL at which code-server is hosted (used for login redirects)." };
|
||||
options["cert"] = { type: "string", cat: "o", description: "Path to certificate. If the path is omitted, both this and --cert-key will be generated." };
|
||||
options["cert-key"] = { type: "string", cat: "o", description: "Path to the certificate's key if one was provided." };
|
||||
options["format"] = { type: "string", cat: "o", description: `Format for the version. ${buildAllowedMessage(FormatType)}.` };
|
||||
options["host"] = { type: "string", cat: "o", description: "Host for the server." };
|
||||
options["auth"] = { type: "string", cat: "o", description: `The type of authentication to use. ${buildAllowedMessage(AuthType)}.` };
|
||||
options["open"] = { type: "boolean", cat: "o", description: "Open in the browser on startup." };
|
||||
options["port"] = { type: "string", cat: "o", description: "Port for the main server." };
|
||||
options["socket"] = { type: "string", cat: "o", description: "Listen on a socket instead of host:port." };
|
||||
|
||||
const args = parseMainProcessArgv(process.argv);
|
||||
if (!args["user-data-dir"]) {
|
||||
@@ -84,25 +74,28 @@ const getArgs = (): Args => {
|
||||
args["extensions-dir"] = path.join(args["user-data-dir"], "extensions");
|
||||
}
|
||||
|
||||
if (!args.verbose && !args.log && process.env.LOG_LEVEL) {
|
||||
args.log = process.env.LOG_LEVEL;
|
||||
}
|
||||
|
||||
return validatePaths(args);
|
||||
};
|
||||
|
||||
const startVscode = async (): Promise<void | void[]> => {
|
||||
const args = getArgs();
|
||||
const startVscode = async (args: Args): Promise<void | void[]> => {
|
||||
const extra = args["_"] || [];
|
||||
const options = {
|
||||
auth: args.auth,
|
||||
auth: args.auth || AuthType.Password,
|
||||
basePath: args["base-path"],
|
||||
cert: args.cert,
|
||||
certKey: args["cert-key"],
|
||||
folderUri: extra.length > 1 ? extra[extra.length - 1] : undefined,
|
||||
openUri: extra.length > 1 ? extra[extra.length - 1] : undefined,
|
||||
host: args.host,
|
||||
password: process.env.PASSWORD,
|
||||
};
|
||||
|
||||
if (options.auth && enumToArray(AuthType).filter((t) => t === options.auth).length === 0) {
|
||||
if (enumToArray(AuthType).filter((t) => t === options.auth).length === 0) {
|
||||
throw new Error(`'${options.auth}' is not a valid authentication type.`);
|
||||
} else if (options.auth && !options.password) {
|
||||
} else if (options.auth === "password" && !options.password) {
|
||||
options.password = await generatePassword();
|
||||
}
|
||||
|
||||
@@ -120,7 +113,7 @@ const startVscode = async (): Promise<void | void[]> => {
|
||||
|
||||
const server = new MainServer({
|
||||
...options,
|
||||
port: typeof args.port !== "undefined" && parseInt(args.port, 10) || 8080,
|
||||
port: typeof args.port !== "undefined" ? parseInt(args.port, 10) : 8080,
|
||||
socket: args.socket,
|
||||
}, args);
|
||||
|
||||
@@ -130,10 +123,13 @@ const startVscode = async (): Promise<void | void[]> => {
|
||||
]);
|
||||
logger.info(`Server listening on ${serverAddress}`);
|
||||
|
||||
if (options.auth && !process.env.PASSWORD) {
|
||||
if (options.auth === "password" && !process.env.PASSWORD) {
|
||||
logger.info(` - Password is ${options.password}`);
|
||||
logger.info(" - To use your own password, set the PASSWORD environment variable");
|
||||
} else if (options.auth) {
|
||||
logger.info(" - To use your own password, set the PASSWORD environment variable");
|
||||
if (!args.auth) {
|
||||
logger.info(" - To disable use `--auth none`");
|
||||
}
|
||||
} else if (options.auth === "password") {
|
||||
logger.info(" - Using custom password for authentication");
|
||||
} else {
|
||||
logger.info(" - No authentication");
|
||||
@@ -151,29 +147,28 @@ const startVscode = async (): Promise<void | void[]> => {
|
||||
|
||||
if (!server.options.socket && args.open) {
|
||||
// The web socket doesn't seem to work if browsing with 0.0.0.0.
|
||||
const openAddress = `http://localhost:${server.options.port}`;
|
||||
const openAddress = serverAddress.replace(/:\/\/0.0.0.0/, "://localhost");
|
||||
await open(openAddress).catch(console.error);
|
||||
logger.info(` - Opened ${openAddress}`);
|
||||
}
|
||||
};
|
||||
|
||||
const startCli = (): boolean | Promise<void> => {
|
||||
const args = getArgs();
|
||||
const startCli = (args: Args): boolean | Promise<void> => {
|
||||
if (args.help) {
|
||||
const executable = `${product.applicationName}${os.platform() === "win32" ? ".exe" : ""}`;
|
||||
console.log(buildHelpMessage(product.nameLong, executable, pkg.codeServerVersion, undefined, false));
|
||||
console.log(buildHelpMessage(product.nameLong, executable, product.codeServerVersion, OPTIONS, false));
|
||||
return true;
|
||||
}
|
||||
|
||||
if (args.version) {
|
||||
if (args.format === "json") {
|
||||
console.log(JSON.stringify({
|
||||
codeServerVersion: pkg.codeServerVersion,
|
||||
codeServerVersion: product.codeServerVersion,
|
||||
commit: product.commit,
|
||||
vscodeVersion: pkg.version,
|
||||
vscodeVersion: product.version,
|
||||
}));
|
||||
} else {
|
||||
buildVersionMessage(pkg.codeServerVersion, product.commit).split("\n").map((line) => logger.info(line));
|
||||
buildVersionMessage(product.codeServerVersion, product.commit).split("\n").map((line) => logger.info(line));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -198,14 +193,17 @@ const startCli = (): boolean | Promise<void> => {
|
||||
export class WrapperProcess {
|
||||
private process?: cp.ChildProcess;
|
||||
private started?: Promise<void>;
|
||||
private currentVersion = product.codeServerVersion;
|
||||
|
||||
public constructor() {
|
||||
public constructor(private readonly args: Args) {
|
||||
ipcMain.onMessage(async (message) => {
|
||||
switch (message) {
|
||||
switch (message.type) {
|
||||
case "relaunch":
|
||||
logger.info("Relaunching...");
|
||||
logger.info(`Relaunching: ${this.currentVersion} -> ${message.version}`);
|
||||
this.currentVersion = message.version;
|
||||
this.started = undefined;
|
||||
if (this.process) {
|
||||
this.process.removeAllListeners();
|
||||
this.process.kill();
|
||||
}
|
||||
try {
|
||||
@@ -225,17 +223,35 @@ export class WrapperProcess {
|
||||
public start(): Promise<void> {
|
||||
if (!this.started) {
|
||||
const child = this.spawn();
|
||||
this.started = ipcMain.handshake(child);
|
||||
this.started = ipcMain.handshake(child).then(() => {
|
||||
child.once("exit", (code) => exit(code!));
|
||||
});
|
||||
this.process = child;
|
||||
}
|
||||
return this.started;
|
||||
}
|
||||
|
||||
	private spawn(): cp.ChildProcess {
		return cp.spawn(process.argv[0], process.argv.slice(1), {
		// Flags to pass along to the Node binary. We use the environment variable
		// since otherwise the code-server binary will swallow them.
		const maxMemory = this.args["max-memory"] || 2048;
		let nodeOptions = `${process.env.NODE_OPTIONS || ""} ${this.args["js-flags"] || ""}`;
		if (!/max_old_space_size=(\d+)/g.exec(nodeOptions)) {
			nodeOptions += ` --max_old_space_size=${maxMemory}`;
		}

		// If we're using loose files then we need to specify the path. If we're in
		// the binary we need to let the binary determine the path (via nbin) since
		// it could be different between binaries which presents a problem when
		// upgrading (different version numbers or different staging directories).
		const isBinary = (global as any).NBIN_LOADED;
		return cp.spawn(process.argv[0], process.argv.slice(isBinary ? 2 : 1), {
			env: {
				...process.env,
				LAUNCH_VSCODE: "true",
				NBIN_BYPASS: undefined,
				VSCODE_PARENT_PID: process.pid.toString(),
				NODE_OPTIONS: nodeOptions,
			},
			stdio: ["inherit", "inherit", "inherit", "ipc"],
		});
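For illustration only (not part of the diff): a rough sketch of the NODE_OPTIONS handling above, assuming the same 2048 MB default. The helper name is made up for the example.

// Append a default --max_old_space_size unless the caller already set one.
const buildNodeOptions = (existing?: string, jsFlags?: string, maxMemory = 2048): string => {
	let nodeOptions = `${existing || ""} ${jsFlags || ""}`;
	if (!/max_old_space_size=(\d+)/.test(nodeOptions)) {
		nodeOptions += ` --max_old_space_size=${maxMemory}`;
	}
	return nodeOptions.trim();
};

buildNodeOptions();                            // "--max_old_space_size=2048"
buildNodeOptions("--max_old_space_size=4096"); // "--max_old_space_size=4096"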
@@ -243,20 +259,41 @@
}

const main = async(): Promise<boolean | void | void[]> => {
	const args = getArgs();
	if (process.env.LAUNCH_VSCODE) {
		await ipcMain.handshake();
		return startVscode();
		return startVscode(args);
	}
	return startCli() || new WrapperProcess().start();
	return startCli(args) || new WrapperProcess(args).start();
};

const exit = process.exit;
process.exit = function (code?: number) {
	const err = new Error(`process.exit() was prevented: ${code || "unknown code"}.`);
	console.warn(err.stack);
} as (code?: number) => never;

// Copy the extension host behavior of killing oneself if the parent dies. This
// also exists in bootstrap-fork.js but spawning with that won't work because we
// override process.exit.
if (typeof process.env.VSCODE_PARENT_PID !== "undefined") {
	const parentPid = parseInt(process.env.VSCODE_PARENT_PID, 10);
	setInterval(() => {
		try {
			process.kill(parentPid, 0); // Throws an exception if the process doesn't exist anymore.
		} catch (e) {
			exit();
		}
	}, 5000);
}
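As an aside, process.kill(pid, 0) sends no signal at all; signal 0 only performs the existence and permission check, which is what makes it usable as a cheap liveness probe. A rough standalone sketch of the same watchdog pattern (the interval and exit code are assumptions for the example):

// Exit when the given parent process disappears.
const watchParent = (parentPid: number, intervalMs = 5000): NodeJS.Timeout =>
	setInterval(() => {
		try {
			process.kill(parentPid, 0); // Signal 0: existence check only; nothing is delivered.
		} catch {
			process.exit(1); // Parent is gone, so stop the orphaned child.
		}
	}, intervalMs);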

// It's possible that the pipe has closed (for example if you run code-server
// --version | head -1). Assume that means we're done.
if (!process.stdout.isTTY) {
	process.stdout.on("error", () => process.exit());
	process.stdout.on("error", () => exit());
}

main().catch((error) => {
	logger.error(error.message);
	process.exit(typeof error.code === "number" ? error.code : 1);
	exit(typeof error.code === "number" ? error.code : 1);
});
@@ -6,9 +6,9 @@ import { ISocket } from "vs/base/parts/ipc/common/ipc.net";
|
||||
import { NodeSocket } from "vs/base/parts/ipc/node/ipc.net";
|
||||
import { IEnvironmentService } from "vs/platform/environment/common/environment";
|
||||
import { ILogService } from "vs/platform/log/common/log";
|
||||
import { getNlsConfiguration } from "vs/server/src/nls";
|
||||
import { Protocol } from "vs/server/src/protocol";
|
||||
import { uriTransformerPath } from "vs/server/src/util";
|
||||
import { getNlsConfiguration } from "vs/server/src/node/nls";
|
||||
import { Protocol } from "vs/server/src/node/protocol";
|
||||
import { uriTransformerPath } from "vs/server/src/node/util";
|
||||
import { IExtHostReadyMessage } from "vs/workbench/services/extensions/common/extensionHostProtocol";
|
||||
|
||||
export abstract class Connection {
|
||||
@@ -17,10 +17,7 @@ export abstract class Connection {
|
||||
private disposed = false;
|
||||
private _offline: number | undefined;
|
||||
|
||||
public constructor(protected protocol: Protocol) {
|
||||
protocol.onClose(() => this.dispose()); // Explicit close.
|
||||
protocol.onSocketClose(() => this._offline = Date.now()); // Might reconnect.
|
||||
}
|
||||
public constructor(protected protocol: Protocol, public readonly token: string) {}
|
||||
|
||||
public get offline(): number | undefined {
|
||||
return this._offline;
|
||||
@@ -39,6 +36,12 @@ export abstract class Connection {
|
||||
}
|
||||
}
|
||||
|
||||
protected setOffline(): void {
|
||||
if (!this._offline) {
|
||||
this._offline = Date.now();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up the connection on a new socket.
|
||||
*/
|
||||
@@ -50,6 +53,12 @@ export abstract class Connection {
|
||||
* Used for all the IPC channels.
|
||||
*/
|
||||
export class ManagementConnection extends Connection {
|
||||
public constructor(protected protocol: Protocol, token: string) {
|
||||
super(protocol, token);
|
||||
protocol.onClose(() => this.dispose()); // Explicit close.
|
||||
protocol.onSocketClose(() => this.setOffline()); // Might reconnect.
|
||||
}
|
||||
|
||||
protected doDispose(): void {
|
||||
this.protocol.sendDisconnect();
|
||||
this.protocol.dispose();
|
||||
@@ -66,11 +75,11 @@ export class ExtensionHostConnection extends Connection {
|
||||
private process?: cp.ChildProcess;
|
||||
|
||||
public constructor(
|
||||
locale:string, protocol: Protocol, buffer: VSBuffer,
|
||||
locale:string, protocol: Protocol, buffer: VSBuffer, token: string,
|
||||
private readonly log: ILogService,
|
||||
private readonly environment: IEnvironmentService,
|
||||
) {
|
||||
super(protocol);
|
||||
super(protocol, token);
|
||||
this.protocol.dispose();
|
||||
this.spawn(locale, buffer).then((p) => this.process = p);
|
||||
this.protocol.getUnderlyingSocket().pause();
|
||||
@@ -114,6 +123,7 @@ export class ExtensionHostConnection extends Connection {
|
||||
VSCODE_EXTHOST_WILL_SEND_SOCKET: "true",
|
||||
VSCODE_HANDLES_UNCAUGHT_ERRORS: "true",
|
||||
VSCODE_LOG_STACK: "false",
|
||||
VSCODE_LOG_LEVEL: this.environment.verbose ? "trace" : this.environment.log,
|
||||
VSCODE_NLS_CONFIG: JSON.stringify(config),
|
||||
},
|
||||
silent: true,
|
||||
@@ -129,6 +139,9 @@ export class ExtensionHostConnection extends Connection {
|
||||
const severity = (<any>this.log)[event.severity] ? event.severity : "info";
|
||||
(<any>this.log)[severity]("Extension host", event.arguments);
|
||||
}
|
||||
if (event && event.type === "VSCODE_EXTHOST_DISCONNECTED") {
|
||||
this.setOffline();
|
||||
}
|
||||
});
|
||||
|
||||
const listen = (message: IExtHostReadyMessage) => {
|
||||
124	src/node/insights.ts	Normal file
@@ -0,0 +1,124 @@
|
||||
import * as appInsights from "applicationinsights";
|
||||
import * as https from "https";
|
||||
import * as http from "http";
|
||||
import * as os from "os";
|
||||
|
||||
class Channel {
|
||||
public get _sender() {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
public get _buffer() {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public setUseDiskRetryCaching(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
public send(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
public triggerSend(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
}
|
||||
|
||||
export class TelemetryClient {
|
||||
public context: any = undefined;
|
||||
public commonProperties: any = undefined;
|
||||
public config: any = {};
|
||||
|
||||
public channel: any = new Channel();
|
||||
|
||||
public addTelemetryProcessor(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public clearTelemetryProcessors(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public runTelemetryProcessors(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackTrace(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackMetric(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackException(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackRequest(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackDependency(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public track(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackNodeHttpRequestSync(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackNodeHttpRequest(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackNodeHttpDependency(): void {
|
||||
throw new Error("unimplemented");
|
||||
}
|
||||
|
||||
public trackEvent(options: appInsights.Contracts.EventTelemetry): void {
|
||||
if (!options.properties) {
|
||||
options.properties = {};
|
||||
}
|
||||
if (!options.measurements) {
|
||||
options.measurements = {};
|
||||
}
|
||||
|
||||
try {
|
||||
const cpus = os.cpus();
|
||||
options.measurements.cores = cpus.length;
|
||||
options.properties["common.cpuModel"] = cpus[0].model;
|
||||
} catch (error) {}
|
||||
|
||||
try {
|
||||
options.measurements.memoryFree = os.freemem();
|
||||
options.measurements.memoryTotal = os.totalmem();
|
||||
} catch (error) {}
|
||||
|
||||
try {
|
||||
options.properties["common.shell"] = os.userInfo().shell;
|
||||
options.properties["common.release"] = os.release();
|
||||
options.properties["common.arch"] = os.arch();
|
||||
} catch (error) {}
|
||||
|
||||
try {
|
||||
const url = process.env.TELEMETRY_URL || "https://v1.telemetry.coder.com/track";
|
||||
const request = (/^http:/.test(url) ? http : https).request(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
request.on("error", () => { /* We don"t care. */ });
|
||||
request.write(JSON.stringify(options));
|
||||
request.end();
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
public flush(options: { callback: (v: string) => void }): void {
|
||||
if (options.callback) {
|
||||
options.callback("");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6,7 +6,12 @@ enum ControlMessage {
	okFromChild = "ok<",
}

export type Message = "relaunch";
interface RelaunchMessage {
	type: "relaunch";
	version: string;
}

export type Message = RelaunchMessage;

class IpcMain {
	protected readonly _onMessage = new Emitter<Message>();
@@ -41,11 +46,15 @@ class IpcMain {
		});
	}

	public relaunch(): void {
	public relaunch(version: string): void {
		this.send({ type: "relaunch", version });
	}

	private send(message: Message): void {
		if (!process.send) {
			throw new Error("Not a child process with IPC enabled");
		}
		process.send("relaunch");
		process.send(message);
	}
}
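For illustration only (not part of the diff): a rough sketch of how a typed message like RelaunchMessage travels over Node's child-process IPC channel. The file name and version string are placeholders, and this is not the actual code-server wrapper.

import * as cp from "child_process";

interface RelaunchMessage { type: "relaunch"; version: string; }

// Parent side: spawn with an "ipc" stdio slot and listen for typed messages.
const child = cp.spawn(process.execPath, ["./child.js"], {
	stdio: ["inherit", "inherit", "inherit", "ipc"],
});
child.on("message", (message: RelaunchMessage) => {
	if (message.type === "relaunch") {
		console.log(`child requested a relaunch into version ${message.version}`);
	}
});

// Child side (in ./child.js): process.send only exists when spawned with IPC.
if (process.send) {
	process.send({ type: "relaunch", version: "1.2.3" } as RelaunchMessage);
}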
|
||||
@@ -5,8 +5,8 @@ import { CancellationToken } from "vs/base/common/cancellation";
|
||||
import { mkdirp } from "vs/base/node/pfs";
|
||||
import * as vszip from "vs/base/node/zip";
|
||||
import * as nls from "vs/nls";
|
||||
import product from "vs/platform/product/node/product";
|
||||
import { localRequire } from "vs/server/src/util";
|
||||
import product from "vs/platform/product/common/product";
|
||||
import { localRequire } from "vs/server/src/node/util";
|
||||
|
||||
const tarStream = localRequire<typeof import("tar-stream")>("tar-stream/index");
|
||||
|
||||
@@ -79,52 +79,55 @@ export const buffer = (targetPath: string, filePath: string): Promise<Buffer> =>
};

const extractAssets = async (tarPath: string, match: RegExp, callback: (path: string, data: Buffer) => void): Promise<void> => {
	const buffer = await util.promisify(fs.readFile)(tarPath);
	return new Promise<void>(async (resolve, reject): Promise<void> => {
	return new Promise<void>((resolve, reject): void => {
		const extractor = tarStream.extract();
		extractor.once("error", reject);
		const fail = (error: Error) => {
			extractor.destroy();
			reject(error);
		};
		extractor.once("error", fail);
		extractor.on("entry", async (header, stream, next) => {
			const name = header.name;
			if (match.test(name)) {
				extractData(stream).then((data) => {
					callback(name, data);
					next();
				}).catch(reject);
				stream.resume();
				}).catch(fail);
			} else {
				stream.on("end", () => next());
				stream.resume();
				stream.resume(); // Just drain it.
			}
		});
		extractor.on("finish", resolve);
		extractor.write(buffer);
		extractor.end();
		fs.createReadStream(tarPath).pipe(extractor);
	});
};
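For context, a rough sketch of the tar-stream pattern used above: every entry's stream has to be consumed, or drained with resume(), before next() is called, otherwise the extractor stalls. The file filter and logging are assumptions for the example.

import * as fs from "fs";
import * as tarStream from "tar-stream";

// Print the size of every .json entry in a tarball.
const listJsonEntries = (tarPath: string): Promise<void> =>
	new Promise((resolve, reject) => {
		const extractor = tarStream.extract();
		extractor.once("error", reject);
		extractor.on("entry", (header, stream, next) => {
			if (/\.json$/.test(header.name)) {
				let size = 0;
				stream.on("data", (chunk: Buffer) => size += chunk.length);
				stream.on("end", () => {
					console.log(`${header.name}: ${size} bytes`);
					next();
				});
			} else {
				stream.on("end", () => next());
			}
			stream.resume(); // Drain the entry either way so extraction can continue.
		});
		extractor.on("finish", () => resolve());
		fs.createReadStream(tarPath).pipe(extractor);
	});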
|
||||
const extractData = (stream: NodeJS.ReadableStream): Promise<Buffer> => {
|
||||
return new Promise((resolve, reject): void => {
|
||||
const fileData: Buffer[] = [];
|
||||
stream.on("data", (data) => fileData.push(data));
|
||||
stream.on("end", () => resolve(Buffer.concat(fileData)));
|
||||
stream.on("error", reject);
|
||||
stream.on("end", () => resolve(Buffer.concat(fileData)));
|
||||
stream.on("data", (data) => fileData.push(data));
|
||||
});
|
||||
};
|
||||
|
||||
const extractTar = async (tarPath: string, targetPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
|
||||
const buffer = await util.promisify(fs.readFile)(tarPath);
|
||||
return new Promise<void>(async (resolve, reject): Promise<void> => {
|
||||
return new Promise<void>((resolve, reject): void => {
|
||||
const sourcePathRegex = new RegExp(options.sourcePath ? `^${options.sourcePath}` : "");
|
||||
const extractor = tarStream.extract();
|
||||
extractor.once("error", reject);
|
||||
const fail = (error: Error) => {
|
||||
extractor.destroy();
|
||||
reject(error);
|
||||
};
|
||||
extractor.once("error", fail);
|
||||
extractor.on("entry", async (header, stream, next) => {
|
||||
const rawName = path.normalize(header.name);
|
||||
|
||||
const nextEntry = (): void => {
|
||||
stream.on("end", () => next());
|
||||
stream.resume();
|
||||
next();
|
||||
};
|
||||
|
||||
const rawName = path.normalize(header.name);
|
||||
if (token.isCancellationRequested || !sourcePathRegex.test(rawName)) {
|
||||
return nextEntry();
|
||||
}
|
||||
@@ -138,20 +141,18 @@ const extractTar = async (tarPath: string, targetPath: string, options: IExtract
|
||||
const dirName = path.dirname(fileName);
|
||||
const targetDirName = path.join(targetPath, dirName);
|
||||
if (targetDirName.indexOf(targetPath) !== 0) {
|
||||
return reject(nls.localize("invalid file", "Error extracting {0}. Invalid file.", fileName));
|
||||
return fail(new Error(nls.localize("invalid file", "Error extracting {0}. Invalid file.", fileName)));
|
||||
}
|
||||
|
||||
return mkdirp(targetDirName, undefined, token).then(() => {
|
||||
const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
|
||||
fstream.once("close", () => next());
|
||||
fstream.once("error", reject);
|
||||
stream.pipe(fstream);
|
||||
stream.resume();
|
||||
});
|
||||
await mkdirp(targetDirName, undefined);
|
||||
|
||||
const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
|
||||
fstream.once("close", () => next());
|
||||
fstream.once("error", fail);
|
||||
stream.pipe(fstream);
|
||||
});
|
||||
extractor.once("finish", resolve);
|
||||
extractor.write(buffer);
|
||||
extractor.end();
|
||||
fs.createReadStream(tarPath).pipe(extractor);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -3,7 +3,7 @@ import * as path from "path";
import * as util from "util";
import { getPathFromAmdModule } from "vs/base/common/amd";
import * as lp from "vs/base/node/languagePacks";
import product from "vs/platform/product/node/product";
import product from "vs/platform/product/common/product";
import { Translations } from "vs/workbench/services/extensions/common/extensionPoints";

const configurations = new Map<string, Promise<lp.NLSConfiguration>>();
@@ -28,6 +28,12 @@ export const getNlsConfiguration = async (locale: string, userDataPath: string):
		if (isInternalConfiguration(config)) {
			config._languagePackSupport = true;
		}
		// If the configuration has no results keep trying since code-server
		// doesn't restart when a language is installed so this result would
		// persist (the plugin might not be installed yet or something).
		if (config.locale !== "en" && config.locale !== "en-us" && Object.keys(config.availableLanguages).length === 0) {
			configurations.delete(id);
		}
		resolve(config);
	}));
}
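For illustration only (not part of the diff): the pattern above is a memoized async lookup that deliberately evicts results it considers incomplete, so the next caller retries instead of being stuck with an empty cached result. A rough standalone sketch under that assumption (the lookup function is hypothetical):

// Hypothetical slow lookup used only for this sketch.
const lookupLanguages = async (_locale: string): Promise<string[]> => [];

const cache = new Map<string, Promise<string[]>>();

// Cache the lookup, but drop entries that came back empty so a later call
// retries (for example after a language pack finishes installing).
const getLanguages = (locale: string): Promise<string[]> => {
	const id = locale.toLowerCase();
	if (!cache.has(id)) {
		cache.set(id, lookupLanguages(id).then((languages) => {
			if (languages.length === 0) {
				cache.delete(id); // Incomplete result: allow a retry next time.
			}
			return languages;
		}));
	}
	return cache.get(id)!;
};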
@@ -17,13 +17,12 @@ import { generateUuid } from "vs/base/common/uuid";
|
||||
import { getMachineId } from 'vs/base/node/id';
|
||||
import { NLSConfiguration } from "vs/base/node/languagePacks";
|
||||
import { mkdirp, rimraf } from "vs/base/node/pfs";
|
||||
import { ClientConnectionEvent, IPCServer, StaticRouter } from "vs/base/parts/ipc/common/ipc";
|
||||
import { ClientConnectionEvent, IPCServer, IServerChannel } from "vs/base/parts/ipc/common/ipc";
|
||||
import { createChannelReceiver } from "vs/base/parts/ipc/node/ipc";
|
||||
import { LogsDataCleaner } from "vs/code/electron-browser/sharedProcess/contrib/logsDataCleaner";
|
||||
import { IConfigurationService } from "vs/platform/configuration/common/configuration";
|
||||
import { ConfigurationService } from "vs/platform/configuration/node/configurationService";
|
||||
import { ExtensionHostDebugBroadcastChannel } from "vs/platform/debug/common/extensionHostDebugIpc";
|
||||
import { IDialogService } from "vs/platform/dialogs/common/dialogs";
|
||||
import { DialogChannelClient } from "vs/platform/dialogs/node/dialogIpc";
|
||||
import { IEnvironmentService, ParsedArgs } from "vs/platform/environment/common/environment";
|
||||
import { EnvironmentService } from "vs/platform/environment/node/environmentService";
|
||||
import { ExtensionGalleryService } from "vs/platform/extensionManagement/common/extensionGalleryService";
|
||||
@@ -38,14 +37,13 @@ import { InstantiationService } from "vs/platform/instantiation/common/instantia
|
||||
import { ServiceCollection } from "vs/platform/instantiation/common/serviceCollection";
|
||||
import { ILocalizationsService } from "vs/platform/localizations/common/localizations";
|
||||
import { LocalizationsService } from "vs/platform/localizations/node/localizations";
|
||||
import { LocalizationsChannel } from "vs/platform/localizations/node/localizationsIpc";
|
||||
import { getLogLevel, ILogService } from "vs/platform/log/common/log";
|
||||
import { LogLevelSetterChannel } from "vs/platform/log/common/logIpc";
|
||||
import { LoggerChannel } from "vs/platform/log/common/logIpc";
|
||||
import { SpdLogService } from "vs/platform/log/node/spdlogService";
|
||||
import { IProductService } from "vs/platform/product/common/product";
|
||||
import pkg from "vs/platform/product/node/package";
|
||||
import product from "vs/platform/product/node/product";
|
||||
import product from 'vs/platform/product/common/product';
|
||||
import { IProductService } from "vs/platform/product/common/productService";
|
||||
import { ConnectionType, ConnectionTypeRequest } from "vs/platform/remote/common/remoteAgentConnection";
|
||||
import { RemoteAgentConnectionContext } from "vs/platform/remote/common/remoteAgentEnvironment";
|
||||
import { REMOTE_FILE_SYSTEM_CHANNEL_NAME } from "vs/platform/remote/common/remoteAgentFileSystemChannel";
|
||||
import { IRequestService } from "vs/platform/request/common/request";
|
||||
import { RequestChannel } from "vs/platform/request/common/requestIpc";
|
||||
@@ -56,15 +54,17 @@ import { ITelemetryServiceConfig, TelemetryService } from "vs/platform/telemetry
|
||||
import { combinedAppender, LogAppender, NullTelemetryService } from "vs/platform/telemetry/common/telemetryUtils";
|
||||
import { AppInsightsAppender } from "vs/platform/telemetry/node/appInsightsAppender";
|
||||
import { resolveCommonProperties } from "vs/platform/telemetry/node/commonProperties";
|
||||
import { UpdateChannel } from "vs/platform/update/node/updateIpc";
|
||||
import { ExtensionEnvironmentChannel, FileProviderChannel } from "vs/server/src/channel";
|
||||
import { Connection, ExtensionHostConnection, ManagementConnection } from "vs/server/src/connection";
|
||||
import { TelemetryClient } from "vs/server/src/insights";
|
||||
import { getLocaleFromConfig, getNlsConfiguration } from "vs/server/src/nls";
|
||||
import { Protocol } from "vs/server/src/protocol";
|
||||
import { TelemetryChannel } from "vs/server/src/telemetry";
|
||||
import { UpdateService } from "vs/server/src/update";
|
||||
import { AuthType, getMediaMime, getUriTransformer, localRequire, tmpdir } from "vs/server/src/util";
|
||||
import { UpdateChannel } from "vs/platform/update/electron-main/updateIpc";
|
||||
import { INodeProxyService, NodeProxyChannel } from "vs/server/src/common/nodeProxy";
|
||||
import { TelemetryChannel } from "vs/server/src/common/telemetry";
|
||||
import { split } from "vs/server/src/common/util";
|
||||
import { ExtensionEnvironmentChannel, FileProviderChannel, NodeProxyService } from "vs/server/src/node/channel";
|
||||
import { Connection, ExtensionHostConnection, ManagementConnection } from "vs/server/src/node/connection";
|
||||
import { TelemetryClient } from "vs/server/src/node/insights";
|
||||
import { getLocaleFromConfig, getNlsConfiguration } from "vs/server/src/node/nls";
|
||||
import { Protocol } from "vs/server/src/node/protocol";
|
||||
import { UpdateService } from "vs/server/src/node/update";
|
||||
import { AuthType, getMediaMime, getUriTransformer, hash, localRequire, tmpdir } from "vs/server/src/node/util";
|
||||
import { RemoteExtensionLogFileName } from "vs/workbench/services/remote/common/remoteAgentService";
|
||||
import { IWorkbenchConstructionOptions } from "vs/workbench/workbench.web.api";
|
||||
|
||||
@@ -81,8 +81,9 @@ export enum HttpCode {
|
||||
}
|
||||
|
||||
export interface Options {
|
||||
WORKBENCH_WEB_CONGIGURATION: IWorkbenchConstructionOptions;
|
||||
WORKBENCH_WEB_CONFIGURATION: IWorkbenchConstructionOptions & { folderUri?: UriComponents, workspaceUri?: UriComponents };
|
||||
REMOTE_USER_DATA_URI: UriComponents | URI;
|
||||
PRODUCT_CONFIGURATION: Partial<IProductService>;
|
||||
NLS_CONFIGURATION: NLSConfiguration;
|
||||
}
|
||||
|
||||
@@ -101,6 +102,10 @@ export interface LoginPayload {
|
||||
password?: string;
|
||||
}
|
||||
|
||||
export interface AuthPayload {
|
||||
key?: string[];
|
||||
}
|
||||
|
||||
export class HttpError extends Error {
|
||||
public constructor(message: string, public readonly code: number) {
|
||||
super(message);
|
||||
@@ -111,12 +116,12 @@ export class HttpError extends Error {
|
||||
}
|
||||
|
||||
export interface ServerOptions {
|
||||
readonly auth?: AuthType;
|
||||
readonly auth: AuthType;
|
||||
readonly basePath?: string;
|
||||
readonly connectionToken?: string;
|
||||
readonly cert?: string;
|
||||
readonly certKey?: string;
|
||||
readonly folderUri?: string;
|
||||
readonly openUri?: string;
|
||||
readonly host?: string;
|
||||
readonly password?: string;
|
||||
readonly port?: number;
|
||||
@@ -125,7 +130,7 @@ export interface ServerOptions {
|
||||
|
||||
export abstract class Server {
|
||||
protected readonly server: http.Server | https.Server;
|
||||
protected rootPath = path.resolve(__dirname, "../../../..");
|
||||
protected rootPath = path.resolve(__dirname, "../../../../..");
|
||||
protected serverRoot = path.join(this.rootPath, "/out/vs/server/src");
|
||||
protected readonly allowedRequestPaths: string[] = [this.rootPath];
|
||||
private listenPromise: Promise<string> | undefined;
|
||||
@@ -134,9 +139,10 @@ export abstract class Server {
|
||||
|
||||
public constructor(options: ServerOptions) {
|
||||
this.options = {
|
||||
host: options.auth && options.cert ? "0.0.0.0" : "localhost",
|
||||
host: options.auth === "password" && options.cert ? "0.0.0.0" : "localhost",
|
||||
...options,
|
||||
basePath: options.basePath ? options.basePath.replace(/\/+$/, "") : "",
|
||||
password: options.password ? hash(options.password) : undefined,
|
||||
};
|
||||
this.protocol = this.options.cert ? "https" : "http";
|
||||
if (this.protocol === "https") {
|
||||
@@ -194,9 +200,14 @@ export abstract class Server {
|
||||
return { content: await util.promisify(fs.readFile)(filePath), filePath };
|
||||
}
|
||||
|
||||
protected async getAnyResource(...parts: string[]): Promise<Response> {
|
||||
const filePath = path.join(...parts);
|
||||
return { content: await util.promisify(fs.readFile)(filePath), filePath };
|
||||
}
|
||||
|
||||
protected async getTarredResource(...parts: string[]): Promise<Response> {
|
||||
const filePath = this.ensureAuthorizedFilePath(...parts);
|
||||
return { stream: tarFs.pack(filePath), filePath, mime: "application/tar" };
|
||||
return { stream: tarFs.pack(filePath), filePath, mime: "application/tar", cache: true };
|
||||
}
|
||||
|
||||
protected ensureAuthorizedFilePath(...parts: string[]): string {
|
||||
@@ -208,8 +219,8 @@ export abstract class Server {
|
||||
}
|
||||
|
||||
protected withBase(request: http.IncomingMessage, path: string): string {
|
||||
const split = request.url ? request.url.split("?", 2) : [];
|
||||
return `${this.protocol}://${request.headers.host}${this.options.basePath}${path}${split.length === 2 ? `?${split[1]}` : ""}`;
|
||||
const [, query] = request.url ? split(request.url, "?") : [];
|
||||
return `${this.protocol}://${request.headers.host}${this.options.basePath}${path}${query ? `?${query}` : ""}`;
|
||||
}
|
||||
|
||||
private isAllowedRequestPath(path: string): boolean {
|
||||
@@ -270,7 +281,7 @@ export abstract class Server {
|
||||
base = path.normalize(base);
|
||||
requestPath = path.normalize(requestPath || "/index.html");
|
||||
|
||||
if (base !== "/login" || !this.options.auth || requestPath !== "/index.html") {
|
||||
if (base !== "/login" || this.options.auth !== "password" || requestPath !== "/index.html") {
|
||||
this.ensureGet(request);
|
||||
}
|
||||
|
||||
@@ -279,15 +290,17 @@
		// without adding query parameters which have their own issues.
		// REVIEW: Discuss whether this is the best option; this is sort of a quick
		// hack almost to get caching in the meantime but it does work pretty well.
		if (/static-.+/.test(base)) {
		if (/^\/static-/.test(base)) {
			base = "/static";
		}
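For context, the idea is that assets are requested under a commit-stamped prefix (for example /static-<commit>/...), so the URL changes on every release and can be cached aggressively without query parameters; the server then collapses the prefix back to /static before routing. A rough sketch with a placeholder commit:

const commit = "abc123"; // Placeholder; the real value comes from the product configuration.

// Outgoing: stamp asset paths with the commit so browsers can cache them hard.
const stampStaticPaths = (html: string): string =>
	html.replace(/\/static\//g, `/static-${commit}/`);

// Incoming: strip the stamp again before routing.
const normalizeBase = (base: string): string =>
	/^\/static-/.test(base) ? "/static" : base;

normalizeBase(`/static-${commit}`); // "/static"
normalizeBase("/login");            // "/login"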
|
||||
switch (base) {
|
||||
case "/":
|
||||
switch (requestPath) {
|
||||
case "/favicon.ico":
|
||||
// NOTE: This must be served at the correct location based on the
|
||||
// start_url in the manifest.
|
||||
case "/manifest.json":
|
||||
case "/code-server.png":
|
||||
const response = await this.getResource(this.serverRoot, "media", requestPath);
|
||||
response.cache = true;
|
||||
return response;
|
||||
@@ -301,7 +314,7 @@ export abstract class Server {
|
||||
response.cache = true;
|
||||
return response;
|
||||
case "/login":
|
||||
if (!this.options.auth || requestPath !== "/index.html") {
|
||||
if (this.options.auth !== "password" || requestPath !== "/index.html") {
|
||||
throw new HttpError("Not found", HttpCode.NotFound);
|
||||
}
|
||||
return this.tryLogin(request);
|
||||
@@ -331,7 +344,7 @@ export abstract class Server {
|
||||
this.ensureGet(request);
|
||||
if (!this.authenticate(request)) {
|
||||
throw new HttpError("Unauthorized", HttpCode.Unauthorized);
|
||||
} else if (request.headers.upgrade !== "websocket") {
|
||||
} else if (!request.headers.upgrade || request.headers.upgrade.toLowerCase() !== "websocket") {
|
||||
throw new Error("HTTP/1.1 400 Bad Request");
|
||||
}
|
||||
|
||||
@@ -352,16 +365,25 @@ export abstract class Server {
|
||||
}
|
||||
|
||||
private async tryLogin(request: http.IncomingMessage): Promise<Response> {
|
||||
if (this.authenticate(request) && (request.method === "GET" || request.method === "POST")) {
|
||||
return { redirect: "/" };
|
||||
const redirect = (password: string | true) => {
|
||||
return {
|
||||
redirect: "/",
|
||||
headers: typeof password === "string"
|
||||
? { "Set-Cookie": `key=${password}; Path=${this.options.basePath || "/"}; HttpOnly; SameSite=strict` }
|
||||
: {},
|
||||
};
|
||||
};
|
||||
const providedPassword = this.authenticate(request);
|
||||
if (providedPassword && (request.method === "GET" || request.method === "POST")) {
|
||||
return redirect(providedPassword);
|
||||
}
|
||||
if (request.method === "POST") {
|
||||
const data = await this.getData<LoginPayload>(request);
|
||||
if (this.authenticate(request, data)) {
|
||||
return {
|
||||
redirect: "/",
|
||||
headers: {"Set-Cookie": `password=${data.password}` }
|
||||
};
|
||||
const password = this.authenticate(request, {
|
||||
key: typeof data.password === "string" ? [hash(data.password)] : undefined,
|
||||
});
|
||||
if (password) {
|
||||
return redirect(password);
|
||||
}
|
||||
console.error("Failed login attempt", JSON.stringify({
|
||||
xForwardedFor: request.headers["x-forwarded-for"],
|
||||
@@ -376,7 +398,7 @@ export abstract class Server {
|
||||
}
|
||||
|
||||
private async getLogin(error: string = "", payload?: LoginPayload): Promise<Response> {
|
||||
const filePath = path.join(this.serverRoot, "login/index.html");
|
||||
const filePath = path.join(this.serverRoot, "browser/login.html");
|
||||
const content = (await util.promisify(fs.readFile)(filePath, "utf8"))
|
||||
.replace("{{ERROR}}", error)
|
||||
.replace("display:none", error ? "display:block" : "display:none")
|
||||
@@ -421,35 +443,54 @@
			: Promise.resolve({} as T);
	}

	private authenticate(request: http.IncomingMessage, payload?: LoginPayload): boolean {
		if (!this.options.auth) {
	private authenticate(request: http.IncomingMessage, payload?: AuthPayload): string | boolean {
		if (this.options.auth === "none") {
			return true;
		}
		const safeCompare = localRequire<typeof import("safe-compare")>("safe-compare/index");
		if (typeof payload === "undefined") {
			payload = this.parseCookies<LoginPayload>(request);
			payload = this.parseCookies<AuthPayload>(request);
		}
		return !!this.options.password && safeCompare(payload.password || "", this.options.password);
		if (this.options.password && payload.key) {
			for (let i = 0; i < payload.key.length; ++i) {
				if (safeCompare(payload.key[i], this.options.password)) {
					return payload.key[i];
				}
			}
		}
		return false;
	}

	private parseCookies<T extends object>(request: http.IncomingMessage): T {
		const cookies: { [key: string]: string } = {};
		const cookies: { [key: string]: string[] } = {};
		if (request.headers.cookie) {
			request.headers.cookie.split(";").forEach((keyValue) => {
				const [key, value] = keyValue.split("=", 2);
				cookies[key.trim()] = decodeURI(value);
				const [key, value] = split(keyValue, "=");
				if (!cookies[key]) {
					cookies[key] = [];
				}
				cookies[key].push(decodeURI(value));
			});
		}
		return cookies as T;
	}
}
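For illustration only (not part of the diff): switching the cookie map to string[] means the same cookie name can appear more than once in a request (for example when it was set for different paths), and every candidate value gets checked against the stored password hash. A rough sketch of that parsing behavior:

// Duplicate cookie names collect into arrays instead of overwriting each other.
const parseCookieHeader = (header: string): { [key: string]: string[] } => {
	const cookies: { [key: string]: string[] } = {};
	header.split(";").forEach((keyValue) => {
		const index = keyValue.indexOf("=");
		const key = keyValue.substring(0, index).trim();
		const value = keyValue.substring(index + 1);
		(cookies[key] = cookies[key] || []).push(decodeURI(value));
	});
	return cookies;
};

parseCookieHeader("key=abc123; key=def456; other=1");
// { key: ["abc123", "def456"], other: ["1"] }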
|
||||
interface StartPath {
|
||||
path?: string[] | string;
|
||||
workspace?: boolean;
|
||||
}
|
||||
|
||||
interface Settings {
|
||||
lastVisited?: StartPath;
|
||||
}
|
||||
|
||||
export class MainServer extends Server {
|
||||
public readonly _onDidClientConnect = new Emitter<ClientConnectionEvent>();
|
||||
public readonly onDidClientConnect = this._onDidClientConnect.event;
|
||||
private readonly ipc = new IPCServer(this.onDidClientConnect);
|
||||
private readonly ipc = new IPCServer<RemoteAgentConnectionContext>(this.onDidClientConnect);
|
||||
|
||||
private readonly maxOfflineConnections = 5;
|
||||
private readonly maxExtraOfflineConnections = 0;
|
||||
private readonly connections = new Map<ConnectionType, Map<string, Connection>>();
|
||||
|
||||
private readonly services = new ServiceCollection();
|
||||
@@ -460,6 +501,11 @@ export class MainServer extends Server {
|
||||
private _proxyServer?: Promise<net.Server>;
|
||||
private readonly proxyTimeout = 5000;
|
||||
|
||||
private settings: Settings = {};
|
||||
private heartbeatTimer?: NodeJS.Timeout;
|
||||
private heartbeatInterval = 60000;
|
||||
private lastHeartbeat = 0;
|
||||
|
||||
public constructor(options: ServerOptions, args: ParsedArgs) {
|
||||
super(options);
|
||||
this.servicesPromise = this.initializeServices(args);
|
||||
@@ -476,6 +522,7 @@ export class MainServer extends Server {
|
||||
}
|
||||
|
||||
protected async handleWebSocket(socket: net.Socket, parsedUrl: url.UrlWithParsedQuery): Promise<void> {
|
||||
this.heartbeat();
|
||||
if (!parsedUrl.query.reconnectionToken) {
|
||||
throw new Error("Reconnection token is missing from query parameters");
|
||||
}
|
||||
@@ -499,12 +546,13 @@ export class MainServer extends Server {
|
||||
parsedUrl: url.UrlWithParsedQuery,
|
||||
request: http.IncomingMessage,
|
||||
): Promise<Response> {
|
||||
this.heartbeat();
|
||||
switch (base) {
|
||||
case "/": return this.getRoot(request, parsedUrl);
|
||||
case "/resource":
|
||||
case "/vscode-remote-resource":
|
||||
if (typeof parsedUrl.query.path === "string") {
|
||||
return this.getResource(parsedUrl.query.path);
|
||||
return this.getAnyResource(parsedUrl.query.path);
|
||||
}
|
||||
break;
|
||||
case "/tar":
|
||||
@@ -513,8 +561,8 @@ export class MainServer extends Server {
|
||||
}
|
||||
break;
|
||||
case "/webview":
|
||||
if (requestPath.indexOf("/vscode-resource") === 0) {
|
||||
return this.getResource(requestPath.replace(/^\/vscode-resource/, ""));
|
||||
if (/^\/vscode-resource/.test(requestPath)) {
|
||||
return this.getAnyResource(requestPath.replace(/^\/vscode-resource(\/file)?/, ""));
|
||||
}
|
||||
return this.getResource(
|
||||
this.rootPath,
|
||||
@@ -526,38 +574,49 @@ export class MainServer extends Server {
|
||||
}
|
||||
|
||||
private async getRoot(request: http.IncomingMessage, parsedUrl: url.UrlWithParsedQuery): Promise<Response> {
|
||||
const filePath = path.join(this.rootPath, "out/vs/code/browser/workbench/workbench.html");
|
||||
let [content] = await Promise.all([
|
||||
const filePath = path.join(this.serverRoot, "browser/workbench.html");
|
||||
let [content, startPath] = await Promise.all([
|
||||
util.promisify(fs.readFile)(filePath, "utf8"),
|
||||
this.getFirstValidPath([
|
||||
{ path: parsedUrl.query.workspace, workspace: true },
|
||||
{ path: parsedUrl.query.folder, workspace: false },
|
||||
(await this.readSettings()).lastVisited,
|
||||
{ path: this.options.openUri }
|
||||
]),
|
||||
this.servicesPromise,
|
||||
]);
|
||||
|
||||
if (startPath) {
|
||||
this.writeSettings({
|
||||
lastVisited: {
|
||||
path: startPath.uri.fsPath,
|
||||
workspace: startPath.workspace
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const logger = this.services.get(ILogService) as ILogService;
|
||||
logger.info("request.url", `"${request.url}"`);
|
||||
const environment = this.services.get(IEnvironmentService) as IEnvironmentService;
|
||||
const locale = environment.args.locale || await getLocaleFromConfig(environment.userDataPath);
|
||||
const cwd = process.env.VSCODE_CWD || process.cwd();
|
||||
const workspacePath = parsedUrl.query.workspace as string | undefined;
|
||||
const folderPath = !workspacePath ? parsedUrl.query.folder as string | undefined || this.options.folderUri : undefined;
|
||||
|
||||
const remoteAuthority = request.headers.host as string;
|
||||
const transformer = getUriTransformer(remoteAuthority);
|
||||
|
||||
const environment = this.services.get(IEnvironmentService) as IEnvironmentService;
|
||||
const options: Options = {
|
||||
WORKBENCH_WEB_CONGIGURATION: {
|
||||
workspaceUri: workspacePath
|
||||
? transformer.transformOutgoing(URI.file(sanitizeFilePath(workspacePath, cwd)))
|
||||
: undefined,
|
||||
folderUri: folderPath
|
||||
? transformer.transformOutgoing(URI.file(sanitizeFilePath(folderPath, cwd)))
|
||||
: undefined,
|
||||
WORKBENCH_WEB_CONFIGURATION: {
|
||||
workspaceUri: startPath && startPath.workspace ? transformer.transformOutgoing(startPath.uri) : undefined,
|
||||
folderUri: startPath && !startPath.workspace ? transformer.transformOutgoing(startPath.uri) : undefined,
|
||||
remoteAuthority,
|
||||
productConfiguration: product,
|
||||
logLevel: getLogLevel(environment),
|
||||
},
|
||||
REMOTE_USER_DATA_URI: transformer.transformOutgoing(
|
||||
(this.services.get(IEnvironmentService) as EnvironmentService).webUserDataHome,
|
||||
),
|
||||
NLS_CONFIGURATION: await getNlsConfiguration(locale, environment.userDataPath),
|
||||
REMOTE_USER_DATA_URI: transformer.transformOutgoing(URI.file(environment.userDataPath)),
|
||||
PRODUCT_CONFIGURATION: {
|
||||
extensionsGallery: product.extensionsGallery,
|
||||
},
|
||||
NLS_CONFIGURATION: await getNlsConfiguration(environment.args.locale || await getLocaleFromConfig(environment.userDataPath), environment.userDataPath),
|
||||
};
|
||||
|
||||
content = content.replace(/\/static\//g, `/static${product.commit ? `-${product.commit}` : ""}/`).replace("{{WEBVIEW_ENDPOINT}}", "");
|
||||
content = content.replace(/{{COMMIT}}/g, product.commit || "");
|
||||
for (const key in options) {
|
||||
content = content.replace(`"{{${key}}}"`, `'${JSON.stringify(options[key as keyof Options])}'`);
|
||||
}
|
||||
@@ -565,6 +624,35 @@ export class MainServer extends Server {
|
||||
return { content, filePath };
|
||||
}
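// Illustrative sketch (not part of the diff) of the placeholder substitution
// in getRoot above: the HTML template ships literal `"{{KEY}}"` tokens and
// each one is replaced with the JSON-serialized option wrapped in single
// quotes. The sample template and values here are hypothetical.
const sampleTemplate = `<meta data-settings="{{NLS_CONFIGURATION}}">`;
const sampleOptions: { [key: string]: unknown } = {
	NLS_CONFIGURATION: { locale: "en", availableLanguages: {} },
};
let rendered = sampleTemplate;
for (const key in sampleOptions) {
	rendered = rendered.replace(`"{{${key}}}"`, `'${JSON.stringify(sampleOptions[key])}'`);
}
// rendered === `<meta data-settings='{"locale":"en","availableLanguages":{}}'>`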
|
||||
|
||||
/**
 * Choose the first valid path. If `workspace` is undefined then either a
 * workspace file or a directory is acceptable. Otherwise the path must be a
 * workspace file when `workspace` is true and a directory when it is false.
 */
|
||||
private async getFirstValidPath(startPaths: Array<StartPath | undefined>): Promise<{ uri: URI, workspace?: boolean} | undefined> {
|
||||
const logger = this.services.get(ILogService) as ILogService;
|
||||
const cwd = process.env.VSCODE_CWD || process.cwd();
|
||||
for (let i = 0; i < startPaths.length; ++i) {
|
||||
const startPath = startPaths[i];
|
||||
if (!startPath) {
|
||||
continue;
|
||||
}
|
||||
const paths = typeof startPath.path === "string" ? [startPath.path] : (startPath.path || []);
|
||||
for (let j = 0; j < paths.length; ++j) {
|
||||
const uri = URI.file(sanitizeFilePath(paths[j], cwd));
|
||||
try {
|
||||
const stat = await util.promisify(fs.stat)(uri.fsPath);
|
||||
if (typeof startPath.workspace === "undefined" || startPath.workspace !== stat.isDirectory()) {
|
||||
return { uri, workspace: !stat.isDirectory() };
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
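// Sketch of how the candidate list passed to getFirstValidPath in getRoot is
// evaluated (paths here are hypothetical). Candidates are tried in order; the
// first one whose path exists and matches the expected kind (file for a
// workspace, directory for a folder) wins, so a ?workspace= query beats the
// last visited entry, which in turn beats the configured open URI.
const candidates = [
	{ path: "/home/user/project.code-workspace", workspace: true },  // from ?workspace=
	{ path: "/home/user/project", workspace: false },                // from ?folder=
	{ path: "/home/user/old-project", workspace: false },            // settings.lastVisited
];
// getFirstValidPath(candidates) would resolve to the first entry that passes
// the fs.stat check, e.g. { uri: URI.file("/home/user/project.code-workspace"), workspace: true }.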
|
||||
|
||||
private async connect(message: ConnectionTypeRequest, protocol: Protocol): Promise<void> {
|
||||
if (product.commit && message.commit !== product.commit) {
|
||||
throw new Error(`Version mismatch (${message.commit} instead of ${product.commit})`);
|
||||
@@ -601,36 +689,39 @@ export class MainServer extends Server {
|
||||
|
||||
let connection: Connection;
|
||||
if (message.desiredConnectionType === ConnectionType.Management) {
|
||||
connection = new ManagementConnection(protocol);
|
||||
connection = new ManagementConnection(protocol, token);
|
||||
this._onDidClientConnect.fire({
|
||||
protocol, onDidClientDisconnect: connection.onClose,
|
||||
});
|
||||
// TODO: Need a way to match clients with a connection. For now, dispose
// everything, which only works because no extensions currently use
// long-running proxies.
|
||||
(this.services.get(INodeProxyService) as NodeProxyService)._onUp.fire();
|
||||
connection.onClose(() => (this.services.get(INodeProxyService) as NodeProxyService)._onDown.fire());
|
||||
} else {
|
||||
const buffer = protocol.readEntireBuffer();
|
||||
connection = new ExtensionHostConnection(
|
||||
message.args ? message.args.language : "en",
|
||||
protocol, buffer,
|
||||
protocol, buffer, token,
|
||||
this.services.get(ILogService) as ILogService,
|
||||
this.services.get(IEnvironmentService) as IEnvironmentService,
|
||||
);
|
||||
}
|
||||
connections.set(token, connection);
|
||||
this.disposeOldOfflineConnections();
|
||||
connection.onClose(() => connections.delete(token));
|
||||
this.disposeOldOfflineConnections(connections);
|
||||
break;
|
||||
case ConnectionType.Tunnel: return protocol.tunnel();
|
||||
default: throw new Error("Unrecognized connection type");
|
||||
}
|
||||
}
|
||||
|
||||
private disposeOldOfflineConnections(): void {
|
||||
this.connections.forEach((connections) => {
|
||||
const offline = Array.from(connections.values())
|
||||
.filter((connection) => typeof connection.offline !== "undefined");
|
||||
for (let i = 0, max = offline.length - this.maxOfflineConnections; i < max; ++i) {
|
||||
offline[i].dispose();
|
||||
}
|
||||
});
|
||||
private disposeOldOfflineConnections(connections: Map<string, Connection>): void {
|
||||
const offline = Array.from(connections.values())
|
||||
.filter((connection) => typeof connection.offline !== "undefined");
|
||||
for (let i = 0, max = offline.length - this.maxExtraOfflineConnections; i < max; ++i) {
|
||||
offline[i].dispose();
|
||||
}
|
||||
}
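// Sketch (hypothetical values) of the pruning rule above: only the oldest
// offline connections beyond the allowed surplus are disposed, so recently
// dropped clients keep a window in which they can reconnect.
const maxExtra = 3;
const offlineTokens = ["a", "b", "c", "d", "e"]; // oldest first, as in map order
const toDispose = offlineTokens.slice(0, Math.max(0, offlineTokens.length - maxExtra));
// toDispose === ["a", "b"]; "c", "d" and "e" stay around for reconnection.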
|
||||
|
||||
private async initializeServices(args: ParsedArgs): Promise<void> {
|
||||
@@ -647,28 +738,26 @@ export class MainServer extends Server {
|
||||
...environmentService.extraBuiltinExtensionPaths,
|
||||
);
|
||||
|
||||
this.ipc.registerChannel("loglevel", new LogLevelSetterChannel(logService));
|
||||
this.ipc.registerChannel("logger", new LoggerChannel(logService));
|
||||
this.ipc.registerChannel(ExtensionHostDebugBroadcastChannel.ChannelName, new ExtensionHostDebugBroadcastChannel());
|
||||
|
||||
const router = new StaticRouter((ctx: any) => ctx.clientId === "renderer");
|
||||
this.services.set(ILogService, logService);
|
||||
this.services.set(IEnvironmentService, environmentService);
|
||||
this.services.set(IConfigurationService, new SyncDescriptor(ConfigurationService, [environmentService.machineSettingsResource]));
|
||||
this.services.set(IRequestService, new SyncDescriptor(RequestService));
|
||||
this.services.set(IFileService, fileService);
|
||||
this.services.set(IProductService, { _serviceBrand: undefined, ...product });
|
||||
this.services.set(IDialogService, new DialogChannelClient(this.ipc.getChannel("dialog", router)));
|
||||
this.services.set(IExtensionGalleryService, new SyncDescriptor(ExtensionGalleryService));
|
||||
this.services.set(IExtensionManagementService, new SyncDescriptor(ExtensionManagementService));
|
||||
|
||||
if (!environmentService.args["disable-telemetry"]) {
|
||||
this.services.set(ITelemetryService, new SyncDescriptor(TelemetryService, [{
|
||||
appender: combinedAppender(
|
||||
new AppInsightsAppender("code-server", null, () => new TelemetryClient(), logService),
|
||||
new AppInsightsAppender("code-server", null, () => new TelemetryClient() as any, logService),
|
||||
new LogAppender(logService),
|
||||
),
|
||||
commonProperties: resolveCommonProperties(
|
||||
product.commit, pkg.codeServerVersion, await getMachineId(),
|
||||
product.commit, product.codeServerVersion, await getMachineId(),
|
||||
[], environmentService.installSourcePath, "code-server",
|
||||
),
|
||||
piiPaths: this.allowedRequestPaths,
|
||||
@@ -679,28 +768,25 @@ export class MainServer extends Server {
|
||||
|
||||
await new Promise((resolve) => {
|
||||
const instantiationService = new InstantiationService(this.services);
|
||||
const localizationService = instantiationService.createInstance(LocalizationsService);
|
||||
this.services.set(ILocalizationsService, localizationService);
|
||||
this.ipc.registerChannel("localizations", new LocalizationsChannel(localizationService));
|
||||
this.services.set(ILocalizationsService, instantiationService.createInstance(LocalizationsService));
|
||||
this.services.set(INodeProxyService, instantiationService.createInstance(NodeProxyService));
|
||||
|
||||
instantiationService.invokeFunction(() => {
|
||||
instantiationService.createInstance(LogsDataCleaner);
|
||||
|
||||
const extensionsService = this.services.get(IExtensionManagementService) as IExtensionManagementService;
|
||||
const telemetryService = this.services.get(ITelemetryService) as ITelemetryService;
|
||||
|
||||
const extensionsChannel = new ExtensionManagementChannel(extensionsService, (context) => getUriTransformer(context.remoteAuthority));
|
||||
const extensionsEnvironmentChannel = new ExtensionEnvironmentChannel(environmentService, logService, telemetryService, this.options.connectionToken || "");
|
||||
const fileChannel = new FileProviderChannel(environmentService, logService);
|
||||
const requestChannel = new RequestChannel(this.services.get(IRequestService) as IRequestService);
|
||||
const telemetryChannel = new TelemetryChannel(telemetryService);
|
||||
const updateChannel = new UpdateChannel(instantiationService.createInstance(UpdateService));
|
||||
|
||||
this.ipc.registerChannel("extensions", extensionsChannel);
|
||||
this.ipc.registerChannel("remoteextensionsenvironment", extensionsEnvironmentChannel);
|
||||
this.ipc.registerChannel("request", requestChannel);
|
||||
this.ipc.registerChannel("telemetry", telemetryChannel);
|
||||
this.ipc.registerChannel("update", updateChannel);
|
||||
this.ipc.registerChannel(REMOTE_FILE_SYSTEM_CHANNEL_NAME, fileChannel);
|
||||
this.ipc.registerChannel("extensions", new ExtensionManagementChannel(
|
||||
this.services.get(IExtensionManagementService) as IExtensionManagementService,
|
||||
(context) => getUriTransformer(context.remoteAuthority),
|
||||
));
|
||||
this.ipc.registerChannel("remoteextensionsenvironment", new ExtensionEnvironmentChannel(
|
||||
environmentService, logService, telemetryService, this.options.connectionToken || "",
|
||||
));
|
||||
this.ipc.registerChannel("request", new RequestChannel(this.services.get(IRequestService) as IRequestService));
|
||||
this.ipc.registerChannel("telemetry", new TelemetryChannel(telemetryService));
|
||||
this.ipc.registerChannel("nodeProxy", new NodeProxyChannel(this.services.get(INodeProxyService) as INodeProxyService));
|
||||
this.ipc.registerChannel("localizations", <IServerChannel<any>>createChannelReceiver(this.services.get(ILocalizationsService) as ILocalizationsService));
|
||||
this.ipc.registerChannel("update", new UpdateChannel(instantiationService.createInstance(UpdateService)));
|
||||
this.ipc.registerChannel(REMOTE_FILE_SYSTEM_CHANNEL_NAME, new FileProviderChannel(environmentService, logService));
|
||||
resolve(new ErrorTelemetry(telemetryService));
|
||||
});
|
||||
});
|
||||
@@ -787,4 +873,85 @@ export class MainServer extends Server {
|
||||
}
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the file path for Coder settings.
|
||||
*/
|
||||
private get settingsPath(): string {
|
||||
const environment = this.services.get(IEnvironmentService) as IEnvironmentService;
|
||||
return path.join(environment.userDataPath, "coder.json");
|
||||
}
|
||||
|
||||
/**
 * Read settings from the file. On failure, return the last known settings and
 * log a warning.
 */
|
||||
private async readSettings(): Promise<Settings> {
|
||||
try {
|
||||
const raw = (await util.promisify(fs.readFile)(this.settingsPath, "utf8")).trim();
|
||||
this.settings = raw ? JSON.parse(raw) : {};
|
||||
} catch (error) {
|
||||
if (error.code !== "ENOENT") {
|
||||
(this.services.get(ILogService) as ILogService).warn(error.message);
|
||||
}
|
||||
}
|
||||
return this.settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Write settings combined with current settings. On failure log a warning.
|
||||
*/
|
||||
private async writeSettings(newSettings: Partial<Settings>): Promise<void> {
|
||||
this.settings = { ...this.settings, ...newSettings };
|
||||
try {
|
||||
await util.promisify(fs.writeFile)(this.settingsPath, JSON.stringify(this.settings));
|
||||
} catch (error) {
|
||||
(this.services.get(ILogService) as ILogService).warn(error.message);
|
||||
}
|
||||
}
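// Illustration (hypothetical values) of the shallow merge in writeSettings:
// new keys replace old ones wholesale, so partial updates keep unrelated
// settings but overwrite lastVisited entirely. The resulting coder.json under
// the user data directory would then look roughly like:
// { "lastVisited": { "path": "/home/user/project", "workspace": false } }
const current = { lastVisited: { path: "/old/project", workspace: true } };
const update = { lastVisited: { path: "/home/user/project", workspace: false } };
const merged = { ...current, ...update };
// merged.lastVisited.path === "/home/user/project"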
|
||||
|
||||
/**
|
||||
* Return the file path for the heartbeat file.
|
||||
*/
|
||||
private get heartbeatPath(): string {
|
||||
const environment = this.services.get(IEnvironmentService) as IEnvironmentService;
|
||||
return path.join(environment.userDataPath, "heartbeat");
|
||||
}
|
||||
|
||||
/**
|
||||
* Return all online connections regardless of type.
|
||||
*/
|
||||
private get onlineConnections(): Connection[] {
|
||||
const online = <Connection[]>[];
|
||||
this.connections.forEach((connections) => {
|
||||
connections.forEach((connection) => {
|
||||
if (typeof connection.offline === "undefined") {
|
||||
online.push(connection);
|
||||
}
|
||||
});
|
||||
});
|
||||
return online;
|
||||
}
|
||||
|
||||
/**
 * Write to the heartbeat file if we have not already done so within the
 * interval, then start or reset a timer that keeps running as long as there
 * are active connections. Failures are logged as warnings.
 */
|
||||
private heartbeat(): void {
|
||||
const now = Date.now();
|
||||
if (now - this.lastHeartbeat >= this.heartbeatInterval) {
|
||||
util.promisify(fs.writeFile)(this.heartbeatPath, "").catch((error) => {
|
||||
(this.services.get(ILogService) as ILogService).warn(error.message);
|
||||
});
|
||||
this.lastHeartbeat = now;
|
||||
clearTimeout(this.heartbeatTimer!); // We can clear undefined so ! is fine.
|
||||
this.heartbeatTimer = setTimeout(() => {
|
||||
if (this.onlineConnections.length > 0) {
|
||||
this.heartbeat();
|
||||
}
|
||||
}, this.heartbeatInterval);
|
||||
}
|
||||
}
|
||||
}
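// Standalone sketch (not part of the diff) of the heartbeat throttling used
// above: touch a file at most once per interval and keep rescheduling only
// while something is still connected. The file path, interval, and callback
// are hypothetical stand-ins for heartbeatPath, heartbeatInterval, and
// onlineConnections.
import * as fs from "fs";
import * as util from "util";

function makeHeartbeat(heartbeatPath: string, intervalMs: number, hasOnlineConnections: () => boolean): () => void {
	let last = 0;
	let timer: NodeJS.Timeout | undefined;
	const beat = (): void => {
		const now = Date.now();
		if (now - last < intervalMs) {
			return; // Throttled: the file was written recently enough.
		}
		last = now;
		util.promisify(fs.writeFile)(heartbeatPath, "").catch(() => { /* log a warning in real code */ });
		if (timer) {
			clearTimeout(timer);
		}
		timer = setTimeout(() => {
			if (hasOnlineConnections()) {
				beat(); // Keep the file fresh while clients are connected.
			}
		}, intervalMs);
	};
	return beat;
}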
|
||||
@@ -1,9 +1,7 @@
|
||||
import * as cp from "child_process";
|
||||
import * as os from "os";
|
||||
import * as path from "path";
|
||||
import { Stream } from "stream";
|
||||
import * as util from "util";
|
||||
import { toVSBufferReadableStream } from "vs/base/common/buffer";
|
||||
import { CancellationToken } from "vs/base/common/cancellation";
|
||||
import { URI } from "vs/base/common/uri";
|
||||
import * as pfs from "vs/base/node/pfs";
|
||||
@@ -11,14 +9,13 @@ import { IConfigurationService } from "vs/platform/configuration/common/configur
|
||||
import { IEnvironmentService } from "vs/platform/environment/common/environment";
|
||||
import { IFileService } from "vs/platform/files/common/files";
|
||||
import { ILogService } from "vs/platform/log/common/log";
|
||||
import pkg from "vs/platform/product/node/package";
|
||||
import product from "vs/platform/product/common/product";
|
||||
import { asJson, IRequestService } from "vs/platform/request/common/request";
|
||||
import { AvailableForDownload, State, StateType, UpdateType } from "vs/platform/update/common/update";
|
||||
import { AvailableForDownload, State, UpdateType, StateType } from "vs/platform/update/common/update";
|
||||
import { AbstractUpdateService } from "vs/platform/update/electron-main/abstractUpdateService";
|
||||
import { ipcMain } from "vs/server/src/ipc";
|
||||
import { extract } from "vs/server/src/marketplace";
|
||||
import { tmpdir } from "vs/server/src/util";
|
||||
import * as zlib from "zlib";
|
||||
import { ipcMain } from "vs/server/src/node/ipc";
|
||||
import { extract } from "vs/server/src/node/marketplace";
|
||||
import { tmpdir } from "vs/server/src/node/util";
|
||||
|
||||
interface IUpdate {
|
||||
name: string;
|
||||
@@ -37,27 +34,41 @@ export class UpdateService extends AbstractUpdateService {
|
||||
super(null, configurationService, environmentService, requestService, logService);
|
||||
}
|
||||
|
||||
public async isLatestVersion(): Promise<boolean | undefined> {
|
||||
const latest = await this.getLatestVersion();
|
||||
return !latest || latest.name === pkg.codeServerVersion;
|
||||
/**
|
||||
* Return true if the currently installed version is the latest.
|
||||
*/
|
||||
public async isLatestVersion(latest?: IUpdate | null): Promise<boolean | undefined> {
|
||||
if (!latest) {
|
||||
latest = await this.getLatestVersion();
|
||||
}
|
||||
if (latest) {
|
||||
const latestMajor = parseInt(latest.name);
|
||||
const currentMajor = parseInt(product.codeServerVersion);
|
||||
// If these are invalid versions we can't compare meaningfully.
|
||||
return isNaN(latestMajor) || isNaN(currentMajor) ||
|
||||
// This can happen when there is a pre-release for a new major version.
|
||||
currentMajor > latestMajor ||
|
||||
// Otherwise assume that if it's not the same then we're out of date.
|
||||
latest.name === product.codeServerVersion;
|
||||
}
|
||||
return true;
|
||||
}
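// Illustration (hypothetical version strings) of the major-version comparison
// above. parseInt stops at the first non-digit, so "2.1698" and "3.0.0-beta"
// compare as majors 2 and 3.
const latestName: string = "2.1698";      // hypothetical latest release name
const currentName: string = "3.0.0-beta"; // hypothetical installed pre-release
const upToDate = isNaN(parseInt(latestName)) || isNaN(parseInt(currentName)) ||
	parseInt(currentName) > parseInt(latestName) || latestName === currentName;
// upToDate === true: a pre-release of the next major is not reported as
// outdated, while current "2.1650" against latest "2.1698" would yield false.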
|
||||
|
||||
protected buildUpdateFeedUrl(): string {
|
||||
return "https://api.github.com/repos/cdr/code-server/releases/latest";
|
||||
protected buildUpdateFeedUrl(quality: string): string {
|
||||
return `${product.updateUrl}/${quality}`;
|
||||
}
|
||||
|
||||
protected doQuitAndInstall(): void {
|
||||
ipcMain.relaunch();
|
||||
public async doQuitAndInstall(): Promise<void> {
|
||||
if (this.state.type === StateType.Ready) {
|
||||
ipcMain.relaunch(this.state.update.version);
|
||||
}
|
||||
}
|
||||
|
||||
protected async doCheckForUpdates(context: any): Promise<void> {
|
||||
if (this.state.type !== StateType.Idle) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
this.setState(State.CheckingForUpdates(context));
|
||||
try {
|
||||
const update = await this.getLatestVersion();
|
||||
if (!update || !update.name || update.name === pkg.codeServerVersion) {
|
||||
if (!update || await this.isLatestVersion(update)) {
|
||||
this.setState(State.Idle(UpdateType.Archive));
|
||||
} else {
|
||||
this.setState(State.AvailableForDownload({
|
||||
@@ -73,15 +84,13 @@ export class UpdateService extends AbstractUpdateService {
|
||||
private async getLatestVersion(): Promise<IUpdate | null> {
|
||||
const data = await this.requestService.request({
|
||||
url: this.url,
|
||||
headers: {
|
||||
"User-Agent": "code-server",
|
||||
},
|
||||
headers: { "User-Agent": "code-server" },
|
||||
}, CancellationToken.None);
|
||||
return asJson(data);
|
||||
}
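// Hypothetical shape of the update feed response consumed above; only `name`
// appears in the visible part of IUpdate, so any other fields are assumptions:
// { "name": "2.1698" }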
|
||||
|
||||
protected async doDownloadUpdate(state: AvailableForDownload): Promise<void> {
|
||||
this.setState(State.Updating(state.update));
|
||||
this.setState(State.Downloading(state.update));
|
||||
const target = os.platform();
|
||||
const releaseName = await this.buildReleaseName(state.update.version);
|
||||
const url = "https://github.com/cdr/code-server/releases/download/"
|
||||
@@ -91,15 +100,7 @@ export class UpdateService extends AbstractUpdateService {
|
||||
const extractPath = path.join(tmpdir, state.update.version);
|
||||
try {
|
||||
await pfs.mkdirp(tmpdir);
|
||||
const context = await this.requestService.request({ url }, CancellationToken.None);
|
||||
// Decompress the gzip as we download. If the gzip encoding is set then
|
||||
// the request service already does this.
|
||||
// HACK: This uses knowledge of the internals of the request service.
|
||||
if (target !== "darwin" && context.res.headers["content-encoding"] !== "gzip") {
|
||||
const stream = (context.res as any as Stream);
|
||||
stream.removeAllListeners();
|
||||
context.stream = toVSBufferReadableStream(stream.pipe(zlib.createGunzip()));
|
||||
}
|
||||
const context = await this.requestService.request({ url }, CancellationToken.None, true);
|
||||
await this.fileService.writeFile(URI.file(downloadPath), context.stream);
|
||||
await extract(downloadPath, extractPath, undefined, CancellationToken.None);
|
||||
const newBinary = path.join(extractPath, releaseName, "code-server");
|
||||
@@ -117,8 +118,7 @@ export class UpdateService extends AbstractUpdateService {
|
||||
|
||||
private onRequestError(error: Error, showNotification?: boolean): void {
|
||||
this.logService.error(error);
|
||||
const message: string | undefined = showNotification ? (error.message || error.toString()) : undefined;
|
||||
this.setState(State.Idle(UpdateType.Archive, message));
|
||||
this.setState(State.Idle(UpdateType.Archive, showNotification ? (error.message || error.toString()) : undefined));
|
||||
}
|
||||
|
||||
private async buildReleaseName(release: string): Promise<string> {
|
||||
@@ -128,7 +128,7 @@ export class UpdateService extends AbstractUpdateService {
|
||||
stderr: error.message,
|
||||
stdout: "",
|
||||
}));
|
||||
if (result.stderr.indexOf("musl") !== -1 || result.stdout.indexOf("musl") !== -1) {
|
||||
if (/musl/.test(result.stderr) || /musl/.test(result.stdout)) {
|
||||
target = "alpine";
|
||||
}
|
||||
}
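// Sketch of the musl check above in isolation. The probed command sits
// outside the visible hunk; `ldd --version` is an assumption used for
// illustration (musl's ldd typically mentions "musl" in its output).
import * as cp from "child_process";
import * as util from "util";

async function isMusl(): Promise<boolean> {
	const result = await util.promisify(cp.exec)("ldd --version").catch((error) => ({
		stderr: error.message,
		stdout: "",
	}));
	return /musl/.test(result.stderr) || /musl/.test(result.stdout);
}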
|
||||
@@ -4,22 +4,19 @@ module.exports = (remoteAuthority) => {
|
||||
return {
|
||||
transformIncoming: (uri) => {
|
||||
switch (uri.scheme) {
|
||||
case "code-server": return { scheme: "file", path: uri.path };
|
||||
case "file": return { scheme: "code-server-local", path: uri.path };
|
||||
case "vscode-remote": return { scheme: "file", path: uri.path };
|
||||
default: return uri;
|
||||
}
|
||||
},
|
||||
transformOutgoing: (uri) => {
|
||||
switch (uri.scheme) {
|
||||
case "code-server-local": return { scheme: "file", path: uri.path };
|
||||
case "file": return { scheme: "code-server", authority: remoteAuthority, path: uri.path };
|
||||
case "file": return { scheme: "vscode-remote", authority: remoteAuthority, path: uri.path };
|
||||
default: return uri;
|
||||
}
|
||||
},
|
||||
transformOutgoingScheme: (scheme) => {
|
||||
switch (scheme) {
|
||||
case "code-server-local": return "file";
|
||||
case "file": return "code-server";
|
||||
case "file": return "vscode-remote";
|
||||
default: return scheme;
|
||||
}
|
||||
},
|
||||
@@ -14,6 +14,7 @@ import { mkdirp } from "vs/base/node/pfs";
|
||||
|
||||
export enum AuthType {
|
||||
Password = "password",
|
||||
None = "none",
|
||||
}
|
||||
|
||||
export enum FormatType {
|
||||
@@ -53,7 +54,7 @@ export const generateCertificate = async (): Promise<{ cert: string, certKey: st
|
||||
return paths;
|
||||
};
|
||||
|
||||
export const uriTransformerPath = getPathFromAmdModule(require, "vs/server/src/uriTransformer");
|
||||
export const uriTransformerPath = getPathFromAmdModule(require, "vs/server/src/node/uriTransformer");
|
||||
export const getUriTransformer = (remoteAuthority: string): URITransformer => {
|
||||
const rawURITransformerFactory = <any>require.__$__nodeRequire(uriTransformerPath);
|
||||
const rawURITransformer = <IRawURITransformer>rawURITransformerFactory(remoteAuthority);
|
||||
@@ -66,6 +67,10 @@ export const generatePassword = async (length: number = 24): Promise<string> =>
|
||||
return buffer.toString("hex").substring(0, length);
|
||||
};
|
||||
|
||||
export const hash = (str: string): string => {
|
||||
return crypto.createHash("sha256").update(str).digest("hex");
|
||||
};
|
||||
|
||||
export const getMediaMime = (filePath?: string): string => {
|
||||
return filePath && (vsGetMediaMime(filePath) || (<{[index: string]: string}>{
|
||||
".css": "text/css",
|
||||
@@ -127,7 +132,7 @@ export const enumToArray = (t: any): string[] => {
|
||||
|
||||
export const buildAllowedMessage = (t: any): string => {
|
||||
const values = enumToArray(t);
|
||||
return `Allowed value${values.length === 1 ? " is" : "s are"} ${values.map((t) => `'${t}'`).join(",")}`;
|
||||
return `Allowed value${values.length === 1 ? " is" : "s are"} ${values.map((t) => `'${t}'`).join(", ")}`;
|
||||
};
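// With the AuthType enum above, buildAllowedMessage(AuthType) would yield:
// "Allowed values are 'password', 'none'"
// (assuming enumToArray returns the enum's string values in declaration order).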
|
||||
|
||||
/**
|
||||
@@ -135,5 +140,5 @@ export const buildAllowedMessage = (t: any): string => {
|
||||
* at the root for Node modules.
|
||||
*/
|
||||
export const localRequire = <T>(modulePath: string): T => {
|
||||
return require.__$__nodeRequire(path.resolve(__dirname, "../node_modules", modulePath));
|
||||
return require.__$__nodeRequire(path.resolve(__dirname, "../../node_modules", modulePath));
|
||||
};
|
||||
372
src/upload.ts
@@ -1,372 +0,0 @@
|
||||
import { DesktopDragAndDropData } from "vs/base/browser/ui/list/listView";
|
||||
import { VSBuffer, VSBufferReadableStream } from "vs/base/common/buffer";
|
||||
import { Disposable } from "vs/base/common/lifecycle";
|
||||
import * as path from "vs/base/common/path";
|
||||
import { URI } from "vs/base/common/uri";
|
||||
import { generateUuid } from "vs/base/common/uuid";
|
||||
import { IFileService } from "vs/platform/files/common/files";
|
||||
import { createDecorator, IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
|
||||
import { INotificationService, Severity } from "vs/platform/notification/common/notification";
|
||||
import { IProgress, IProgressService, IProgressStep, ProgressLocation } from "vs/platform/progress/common/progress";
|
||||
import { IWindowsService } from "vs/platform/windows/common/windows";
|
||||
import { IWorkspaceContextService } from "vs/platform/workspace/common/workspace";
|
||||
import { ExplorerItem } from "vs/workbench/contrib/files/common/explorerModel";
|
||||
import { IEditorGroup } from "vs/workbench/services/editor/common/editorGroupsService";
|
||||
import { IEditorService } from "vs/workbench/services/editor/common/editorService";
|
||||
|
||||
export const IUploadService = createDecorator<IUploadService>("uploadService");
|
||||
|
||||
export interface IUploadService {
|
||||
_serviceBrand: undefined;
|
||||
handleDrop(event: DragEvent, resolveTargetGroup: () => IEditorGroup | undefined, afterDrop: (targetGroup: IEditorGroup | undefined) => void, targetIndex?: number): Promise<void>;
|
||||
handleExternalDrop(data: DesktopDragAndDropData, target: ExplorerItem, originalEvent: DragEvent): Promise<void>;
|
||||
}
|
||||
|
||||
export class UploadService extends Disposable implements IUploadService {
|
||||
public _serviceBrand: undefined;
|
||||
public upload: Upload;
|
||||
|
||||
public constructor(
|
||||
@IInstantiationService instantiationService: IInstantiationService,
|
||||
@IWorkspaceContextService private readonly contextService: IWorkspaceContextService,
|
||||
@IWindowsService private readonly windowsService: IWindowsService,
|
||||
@IEditorService private readonly editorService: IEditorService,
|
||||
) {
|
||||
super();
|
||||
this.upload = instantiationService.createInstance(Upload);
|
||||
}
|
||||
|
||||
public async handleDrop(event: DragEvent, resolveTargetGroup: () => IEditorGroup | undefined, afterDrop: (targetGroup: IEditorGroup | undefined) => void, targetIndex?: number): Promise<void> {
|
||||
// TODO: should use the workspace for the editor it was dropped on?
|
||||
const target =this.contextService.getWorkspace().folders[0].uri;
|
||||
const uris = (await this.upload.uploadDropped(event, target)).map((u) => URI.file(u));
|
||||
if (uris.length > 0) {
|
||||
await this.windowsService.addRecentlyOpened(uris.map((u) => ({ fileUri: u })));
|
||||
}
|
||||
const editors = uris.map((uri) => ({
|
||||
resource: uri,
|
||||
options: {
|
||||
pinned: true,
|
||||
index: targetIndex,
|
||||
},
|
||||
}));
|
||||
const targetGroup = resolveTargetGroup();
|
||||
this.editorService.openEditors(editors, targetGroup);
|
||||
afterDrop(targetGroup);
|
||||
}
|
||||
|
||||
public async handleExternalDrop(_data: DesktopDragAndDropData, target: ExplorerItem, originalEvent: DragEvent): Promise<void> {
|
||||
await this.upload.uploadDropped(originalEvent, target.resource);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* There doesn't seem to be a provided type for entries, so here is an
|
||||
* incomplete version.
|
||||
*/
|
||||
interface IEntry {
|
||||
name: string;
|
||||
isFile: boolean;
|
||||
file: (cb: (file: File) => void) => void;
|
||||
createReader: () => ({
|
||||
readEntries: (cb: (entries: Array<IEntry>) => void) => void;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles file uploads.
|
||||
*/
|
||||
class Upload {
|
||||
private readonly maxParallelUploads = 100;
|
||||
private readonly uploadingFiles = new Map<string, Reader | undefined>();
|
||||
private readonly fileQueue = new Map<string, File>();
|
||||
private progress: IProgress<IProgressStep> | undefined;
|
||||
private uploadPromise: Promise<string[]> | undefined;
|
||||
private resolveUploadPromise: (() => void) | undefined;
|
||||
private uploadedFilePaths = <string[]>[];
|
||||
private _total = 0;
|
||||
private _uploaded = 0;
|
||||
private lastPercent = 0;
|
||||
|
||||
public constructor(
|
||||
@INotificationService private notificationService: INotificationService,
|
||||
@IProgressService private progressService: IProgressService,
|
||||
@IFileService private fileService: IFileService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Upload dropped files. This will try to upload everything it can. Errors
|
||||
* will show via notifications. If an upload operation is ongoing, the files
|
||||
* will be added to that operation.
|
||||
*/
|
||||
public async uploadDropped(event: DragEvent, uploadDir: URI): Promise<string[]> {
|
||||
await this.queueFiles(event, uploadDir);
|
||||
if (!this.uploadPromise) {
|
||||
this.uploadPromise = this.progressService.withProgress({
|
||||
cancellable: true,
|
||||
location: ProgressLocation.Notification,
|
||||
title: "Uploading files...",
|
||||
}, (progress) => {
|
||||
return new Promise((resolve): void => {
|
||||
this.progress = progress;
|
||||
this.resolveUploadPromise = (): void => {
|
||||
const uploaded = this.uploadedFilePaths;
|
||||
this.uploadPromise = undefined;
|
||||
this.resolveUploadPromise = undefined;
|
||||
this.uploadedFilePaths = [];
|
||||
this.lastPercent = 0;
|
||||
this._uploaded = 0;
|
||||
this._total = 0;
|
||||
resolve(uploaded);
|
||||
};
|
||||
});
|
||||
}, () => this.cancel());
|
||||
}
|
||||
this.uploadFiles();
|
||||
return this.uploadPromise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel all file uploads.
|
||||
*/
|
||||
public async cancel(): Promise<void> {
|
||||
this.fileQueue.clear();
|
||||
this.uploadingFiles.forEach((r) => r && r.abort());
|
||||
}
|
||||
|
||||
private get total(): number { return this._total; }
|
||||
private set total(total: number) {
|
||||
this._total = total;
|
||||
this.updateProgress();
|
||||
}
|
||||
|
||||
private get uploaded(): number { return this._uploaded; }
|
||||
private set uploaded(uploaded: number) {
|
||||
this._uploaded = uploaded;
|
||||
this.updateProgress();
|
||||
}
|
||||
|
||||
private updateProgress(): void {
|
||||
if (this.progress && this.total > 0) {
|
||||
const percent = Math.floor((this.uploaded / this.total) * 100);
|
||||
this.progress.report({ increment: percent - this.lastPercent });
|
||||
this.lastPercent = percent;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload as many files as possible. When finished, resolve the upload
|
||||
* promise.
|
||||
*/
|
||||
private uploadFiles(): void {
|
||||
while (this.fileQueue.size > 0 && this.uploadingFiles.size < this.maxParallelUploads) {
|
||||
const [path, file] = this.fileQueue.entries().next().value;
|
||||
this.fileQueue.delete(path);
|
||||
if (this.uploadingFiles.has(path)) {
|
||||
this.notificationService.error(new Error(`Already uploading ${path}`));
|
||||
} else {
|
||||
this.uploadingFiles.set(path, undefined);
|
||||
this.uploadFile(path, file).catch((error) => {
|
||||
this.notificationService.error(error);
|
||||
}).finally(() => {
|
||||
this.uploadingFiles.delete(path);
|
||||
this.uploadFiles();
|
||||
});
|
||||
}
|
||||
}
|
||||
if (this.fileQueue.size === 0 && this.uploadingFiles.size === 0) {
|
||||
this.resolveUploadPromise!();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload a file, asking to override if necessary.
|
||||
*/
|
||||
private async uploadFile(filePath: string, file: File): Promise<void> {
|
||||
const uri = URI.file(filePath);
|
||||
if (await this.fileService.exists(uri)) {
|
||||
const overwrite = await new Promise<boolean>((resolve): void => {
|
||||
this.notificationService.prompt(
|
||||
Severity.Error,
|
||||
`${filePath} already exists. Overwrite?`,
|
||||
[
|
||||
{ label: "Yes", run: (): void => resolve(true) },
|
||||
{ label: "No", run: (): void => resolve(false) },
|
||||
],
|
||||
{ onCancel: () => resolve(false) },
|
||||
);
|
||||
});
|
||||
if (!overwrite) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
const tempUri = uri.with({
|
||||
path: path.join(
|
||||
path.dirname(uri.path),
|
||||
`.code-server-partial-upload-${path.basename(uri.path)}-${generateUuid()}`,
|
||||
),
|
||||
});
|
||||
const reader = new Reader(file);
|
||||
reader.on("data", (data) => {
|
||||
if (data && data.byteLength > 0) {
|
||||
this.uploaded += data.byteLength;
|
||||
}
|
||||
});
|
||||
this.uploadingFiles.set(filePath, reader);
|
||||
await this.fileService.writeFile(tempUri, reader);
|
||||
if (reader.aborted) {
|
||||
this.uploaded += (file.size - reader.offset);
|
||||
await this.fileService.del(tempUri);
|
||||
} else {
|
||||
await this.fileService.move(tempUri, uri, true);
|
||||
this.uploadedFilePaths.push(filePath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Queue files from a drop event. We have to get the files first; we can't do
|
||||
* it in tandem with uploading or the entries will disappear.
|
||||
*/
|
||||
private async queueFiles(event: DragEvent, uploadDir: URI): Promise<void> {
|
||||
const promises: Array<Promise<void>> = [];
|
||||
for (let i = 0; event.dataTransfer && event.dataTransfer.items && i < event.dataTransfer.items.length; ++i) {
|
||||
const item = event.dataTransfer.items[i];
|
||||
if (typeof item.webkitGetAsEntry === "function") {
|
||||
promises.push(this.traverseItem(item.webkitGetAsEntry(), uploadDir.fsPath));
|
||||
} else {
|
||||
const file = item.getAsFile();
|
||||
if (file) {
|
||||
this.addFile(uploadDir.fsPath + "/" + file.name, file);
|
||||
}
|
||||
}
|
||||
}
|
||||
await Promise.all(promises);
|
||||
}
|
||||
|
||||
/**
|
||||
* Traverses an entry and add files to the queue.
|
||||
*/
|
||||
private async traverseItem(entry: IEntry, path: string): Promise<void> {
|
||||
if (entry.isFile) {
|
||||
return new Promise<void>((resolve): void => {
|
||||
entry.file((file) => {
|
||||
resolve(this.addFile(path + "/" + file.name, file));
|
||||
});
|
||||
});
|
||||
}
|
||||
path += "/" + entry.name;
|
||||
await new Promise((resolve): void => {
|
||||
const promises: Array<Promise<void>> = [];
|
||||
const dirReader = entry.createReader();
|
||||
// According to the spec, readEntries() must be called until it calls
|
||||
// the callback with an empty array.
|
||||
const readEntries = (): void => {
|
||||
dirReader.readEntries((entries) => {
|
||||
if (entries.length === 0) {
|
||||
Promise.all(promises).then(resolve).catch((error) => {
|
||||
this.notificationService.error(error);
|
||||
resolve();
|
||||
});
|
||||
} else {
|
||||
promises.push(...entries.map((c) => this.traverseItem(c, path)));
|
||||
readEntries();
|
||||
}
|
||||
});
|
||||
};
|
||||
readEntries();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a file to the queue.
|
||||
*/
|
||||
private addFile(path: string, file: File): void {
|
||||
this.total += file.size;
|
||||
this.fileQueue.set(path, file);
|
||||
}
|
||||
}
|
||||
|
||||
class Reader implements VSBufferReadableStream {
|
||||
private _offset = 0;
|
||||
private readonly size = 32000; // ~32kb max while reading in the file.
|
||||
private _aborted = false;
|
||||
private readonly reader = new FileReader();
|
||||
private paused = true;
|
||||
private buffer?: VSBuffer;
|
||||
private callbacks = new Map<string, Array<(...args: any[]) => void>>();
|
||||
|
||||
public constructor(private readonly file: File) {
|
||||
this.reader.addEventListener("load", this.onLoad);
|
||||
}
|
||||
|
||||
public get offset(): number { return this._offset; }
|
||||
public get aborted(): boolean { return this._aborted; }
|
||||
|
||||
public on(event: "data" | "error" | "end", callback: (...args:any[]) => void): void {
|
||||
if (!this.callbacks.has(event)) {
|
||||
this.callbacks.set(event, []);
|
||||
}
|
||||
this.callbacks.get(event)!.push(callback);
|
||||
if (this.aborted) {
|
||||
return this.emit("error", new Error("stream has been aborted"));
|
||||
} else if (this.done) {
|
||||
return this.emit("error", new Error("stream has ended"));
|
||||
} else if (event === "end") { // Once this is being listened to we can safely start outputting data.
|
||||
this.resume();
|
||||
}
|
||||
}
|
||||
|
||||
public abort = (): void => {
|
||||
this._aborted = true;
|
||||
this.reader.abort();
|
||||
this.reader.removeEventListener("load", this.onLoad);
|
||||
this.emit("end");
|
||||
}
|
||||
|
||||
public pause(): void {
|
||||
this.paused = true;
|
||||
}
|
||||
|
||||
public resume(): void {
|
||||
if (this.paused) {
|
||||
this.paused = false;
|
||||
this.readNextChunk();
|
||||
}
|
||||
}
|
||||
|
||||
public destroy(): void {
|
||||
this.abort();
|
||||
}
|
||||
|
||||
private onLoad = (): void => {
|
||||
this.buffer = VSBuffer.wrap(new Uint8Array(this.reader.result as ArrayBuffer));
|
||||
if (!this.paused) {
|
||||
this.readNextChunk();
|
||||
}
|
||||
}
|
||||
|
||||
private readNextChunk(): void {
|
||||
if (this.buffer) {
|
||||
this._offset += this.buffer.byteLength;
|
||||
this.emit("data", this.buffer);
|
||||
this.buffer = undefined;
|
||||
}
|
||||
if (!this.paused) { // Could be paused during the data event.
|
||||
if (this.done) {
|
||||
this.emit("end");
|
||||
} else {
|
||||
this.reader.readAsArrayBuffer(this.file.slice(this.offset, this.offset + this.size));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private emit(event: "data" | "error" | "end", ...args: any[]): void {
|
||||
if (this.callbacks.has(event)) {
|
||||
this.callbacks.get(event)!.forEach((cb) => cb(...args));
|
||||
}
|
||||
}
|
||||
|
||||
private get done(): boolean {
|
||||
return this.offset >= this.file.size;
|
||||
}
|
||||
}
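// Sketch of how the stream above is consumed (the File instance is
// hypothetical; in the upload service it comes from the drop event). Data
// only starts flowing once an "end" listener is attached, which is what
// triggers resume().
declare const droppedFile: File;
const reader = new Reader(droppedFile);
reader.on("data", (chunk) => {
	console.log(`read ${chunk.byteLength} bytes, offset now ${reader.offset}`);
});
reader.on("error", (error) => console.error(error));
reader.on("end", () => console.log("upload stream finished"));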
|
||||