This commit is contained in:
2019-06-07 23:04:57 +08:00
commit 36b92f19e0
67 changed files with 18266 additions and 0 deletions

99
src/app.js Normal file
View File

@ -0,0 +1,99 @@
import hapi from 'hapi'
import swagger from './lib/swagger'
import {
SERVER,
SYSTEM
} from './config'
import routes from './routes'
import db from './lib/db'
import moment from 'moment-timezone'
import SocketIO from 'socket.io'
import websocket from './websocket'
import DockerOde from 'dockerode'
import fs from 'fs'
;(async () => {
  // Fail fast if the Docker daemon socket does not exist or is not a socket.
  const socketPath = SYSTEM.DOCKER_SOCKET
  const stats = fs.statSync(socketPath)
  if (!stats.isSocket()) {
    console.log('Docker can\'t connect.')
  }
  const docker = new DockerOde({
    socketPath: socketPath
  })
  // Verify the Docker environment (architecture and daemon version).
  // Problems are logged but non-fatal, matching the original best-effort behaviour.
  const checkDockerEnv = async () => {
    try {
      const r = await docker.info()
      if (r.Architecture !== 'x86_64') {
        console.log('Require x86-64 system.')
      }
    } catch (err) {
      console.log('Please make sure Docker is working.')
    }
    try {
      const r = await docker.version()
      // Compare the major version numerically instead of relying on
      // implicit string-to-number coercion.
      if (Number(r.Version.split('.')[0]) < 18) {
        console.log('Require Docker 18+ .')
      }
    } catch (err) {
      console.log('Error: ' + err.toString())
    }
    console.log('Docker Running...') // fixed typo: was 'Docker Runding...'
  }
  // Await the environment check (was a floating promise) so its output
  // precedes server startup and rejections are not silently dropped.
  await checkDockerEnv()
  const server = hapi.server(SERVER)
  await server.register([
    ...swagger
  ])
  try {
    // Everything bound here is available as `this` inside route handlers.
    server.bind({
      docker,
      $db: db,
      $moment: moment,
      /**
       * Build a success response envelope.
       * @param {*} data payload placed under `result`
       * @param {number} status application status code (1 = OK)
       * @param {string} [msg] optional human-readable message
       */
      success (data, status = 1, msg) {
        return {
          status,
          msg,
          result: data
        }
      },
      /**
       * Build a failure response envelope.
       * @param {*} data payload placed under `result`
       * @param {number} status application error code (default 10000)
       * @param {string} [msg] optional human-readable message
       */
      fail (data, status = 10000, msg) {
        return {
          status,
          msg,
          result: data
        }
      }
    })
    server.route(routes)
    await server.start()
    // Attach socket.io to the same HTTP listener and expose it to handlers.
    const io = SocketIO.listen(server.listener)
    server.bind({
      io
    })
    websocket(io, docker)
    console.log('Server running at:', server.info.uri)
  } catch (err) {
    console.log(err)
    process.exit(1)
  }
})()

78
src/config.js Normal file
View File

@ -0,0 +1,78 @@
// Development mode only when NODE_ENV is exactly 'development'; unset or any
// other value means production.
const isDev = process.env.NODE_ENV ? process.env.NODE_ENV === 'development' : false
// Core application settings; every entry can be overridden via environment variables.
export const SYSTEM = {
  NAME: process.env.NAME || 'APP',
  // Shared secret; clients present its SHA-512 hex digest as the `token` query parameter.
  TOKEN: process.env.TOKEN || '1jH27dJf9s852',
  // NOTE(review): SCHEME is an array (hapi-swagger expects a list), yet other
  // modules concatenate it into URLs (SYSTEM.SCHEME + '://'); with a single
  // element the array stringifies to that element, so it happens to work —
  // confirm before ever adding a second scheme.
  SCHEME: [isDev ? 'http' : (process.env.SCHEME || 'http')],
  DOMAIN: process.env.DOMAIN || 'www.example.com',
  // Git repository that webhooks/build jobs clone and build.
  REPOPATH: process.env.REPOPATH || 'git@github.com:yi-ge/electron-distribution.git',
  // Root directory for source checkout, build outputs, logs and the lowdb file.
  WORKPATH: process.env.WORKPATH || '/data',
  BUILD_TYPE: process.env.BUILD_TYPE ? process.env.BUILD_TYPE.split(',') : ['win', 'linux'], // ['linux', 'win', 'mac']
  GH_TOKEN: process.env.GH_TOKEN || '', // If you set publish option
  CSC_LINK: process.env.CSC_LINK || '', // https://www.electron.build/code-signing
  CSC_KEY_PASSWORD: process.env.CSC_KEY_PASSWORD || '',
  CSC_NAME: process.env.CSC_NAME || '',
  CSC_IDENTITY_AUTO_DISCOVERY: 'false',
  DOCKER_SOCKET: process.env.DOCKER_SOCKET || '/var/run/docker.sock',
  // Remote macOS build host (reached over ssh/rsync).
  MAC_SERVER_HOST: process.env.MAC_SERVER_HOST || '127.0.0.1',
  MAC_SERVER_PORT: process.env.MAC_SERVER_PORT || '22',
  MAC_SERVER_USERNAME: process.env.MAC_SERVER_USERNAME || 'guest',
  // This (Linux) server as seen from the mac host, for rsync-ing artifacts back.
  LINUX_SERVER_HOST: process.env.LINUX_SERVER_HOST || '127.0.0.1',
  LINUX_SERVER_PORT: process.env.LINUX_SERVER_PORT || '22',
  LINUX_SERVER_USERNAME: process.env.LINUX_SERVER_USERNAME || 'root',
  // Which object-storage backend lib/upload.js dispatches to: 'cos' | 'oss' | 'qiniu'.
  OBJECT_STORAGE_TYPE: process.env.OBJECT_STORAGE_TYPE || 'cos'
}
// hapi server options (dev uses a fixed high port to avoid clashes).
export const SERVER = {
  port: isDev ? '65533' : (process.env.PORT || '80'),
  host: isDev ? '0.0.0.0' : (process.env.HOST || '0.0.0.0'),
  routes: {
    cors: {
      origin: ['*'],
      // Extra headers allowed so GitHub webhooks pass CORS preflight.
      additionalHeaders: ['Expect', 'X-GitHub-Delivery', 'X-GitHub-Event', 'X-Hub-Signature']
    },
    state: {
      parse: false, // parse and store in request.state
      failAction: 'ignore' // may also be 'ignore' or 'log'
    }
  }
}
// Tencent Cloud credentials.
// NOTE: env var names keep the historical 'SECRE' spelling for deploy compatibility.
export const qcloudAccessKey = {
  SecretId: process.env.COS_SECRE_ID || '',
  SecretKey: process.env.COS_SECRE_KEY || ''
}
export const COS = {
  bucket: process.env.COS_BUCKET || 'bucketname-12345678',
  region: process.env.COS_REGION || 'ap-chengdu',
  url: process.env.COS_URL || 'https://cdn.xxx.com'
}
// Alibaba Cloud OSS credentials.
const AliyunAccessKey = {
  accessKeyId: process.env.OSS_ACCESS_KEY_ID || 'id',
  accessKeySecret: process.env.OSS_ACCESS_SECRET || 'key'
}
export const OSS = {
  config: {
    region: process.env.OSS_REGION || 'oss-cn-qingdao',
    accessKeyId: AliyunAccessKey.accessKeyId,
    accessKeySecret: AliyunAccessKey.accessKeySecret,
    bucket: process.env.OSS_BUCKET || 'bucket',
    internal: process.env.OSS_INTERNAL === 'true',
    secure: true,
    timeout: 1200000 // 20min
  },
  url: process.env.OSS_URL || 'https://cdn.xxx.com'
}
// Qiniu credentials; `zone` indexes into qiniu.zone (e.g. 'Zone_z0').
export const QINIU = {
  accessKey: process.env.QINIU_ACCESS_KEY || '',
  secretKey: process.env.QINIU_SECRET_KEY || '',
  bucket: process.env.QINIU_BUCKET_KEY || '',
  url: process.env.QINIU_URL || 'https://cdn.xxx.com',
  zone: process.env.QINIU_ZONE || 'Zone_z0'
}

2
src/dev.js Normal file
View File

@ -0,0 +1,2 @@
// Development entry point: enable on-the-fly Babel transpilation, then boot the app.
require('@babel/register')
require('./app')

135
src/lib/ali-oss.js Normal file
View File

@ -0,0 +1,135 @@
import fs from 'fs'
import { OSS as OSSConfig } from '../config'
import OSS from 'ali-oss'
// 阿里云OSS接口返回Promise使用.then()获取结果,.catch()抓取错误。
// web访问地址OSSConfig.url + object_key
// Real OSS client only when a key is configured; otherwise a no-op stub so the
// module can be imported in dev without credentials.
// OSS SDK calls return Promises; web URL of an object is OSSConfig.url + objectKey.
const client = OSSConfig.config.accessKeyId !== 'id' ? new OSS(OSSConfig.config) : { list () {} }
/***
 * List files in the bucket via list(). Main parameters:
 *   prefix    only list objects with the given key prefix
 *   marker    only list objects whose key sorts after marker
 *   delimiter used to group common key prefixes
 *   max-keys  maximum number of objects to return
 * @type {*}
 */
// Without parameters, at most 1000 objects are returned by default.
// NOTE(review): list() is invoked HERE, at module load — listFiles is a
// Promise (undefined with the dev stub), not a function. Every import of this
// module triggers the request. Making it lazy would change the export's shape,
// so it is left as-is; confirm callers before changing.
export const listFiles = client.list()
/**
 * Upload a file from the local filesystem to OSS.
 *
 * Example:
 *   import path from 'path'
 *   uploadLocalFile('test.js', path.join(__dirname, 'address.js'))
 *     .then((result) => console.log(result))
 *     .catch((err) => console.log(err))
 *
 * @param objectKey remote object name (public URL is OSSConfig.url + objectKey)
 * @param localFile path of the local file to upload
 * @returns Promise resolved with the OSS put() result
 */
export const uploadLocalFile = (objectKey, localFile) => client.put(objectKey, localFile)
/***
 * Streaming upload via putStream. With `chunked` set, the SDK issues a
 * chunked-encoding HTTP PUT; otherwise the exact contentLength is supplied
 * so chunked encoding is not used.
 * @param objectKey remote object name
 * @param localFile local file path
 * @param chunked use chunked encoding (defaults to false)
 * @returns {Object}
 */
export const uploadStream = (objectKey, localFile, chunked = false) => {
  const readStream = fs.createReadStream(localFile)
  if (chunked) {
    // chunked encoding: no length header needed
    return client.putStream(objectKey, readStream)
  }
  // Pass the byte size so the SDK sets Content-Length instead of chunking.
  const { size } = fs.statSync(localFile)
  return client.putStream(objectKey, readStream, { contentLength: size })
}
/***
 * Upload in-memory Buffer content to OSS.
 * @param objectKey remote object name
 * @param buffer a Buffer instance, e.g. Buffer.from('hello world')
 */
export const uploadBuffer = (objectKey, buffer) => client.put(objectKey, buffer)
/***
 * Multipart upload, recommended for large files (roughly >100MB): the file is
 * split into parts so a failed part can be retried without re-uploading the
 * whole file. The `progress` option accepts a generator function (or a thunk)
 * that receives the completed fraction.
 * @param objectKey remote object name
 * @param localFile local file path
 */
export const uploadMultipart = (objectKey, localFile) => {
  const options = {
    progress: function * (p) {
      console.log('Progress: ' + p)
    }
  }
  return client.multipartUpload(objectKey, localFile, options)
}
/***
 * Resumable multipart upload (call again in a loop to actually resume).
 * The progress callback stores the latest checkpoint; after a failure,
 * passing that checkpoint back into multipartUpload continues from where it
 * stopped. The checkpoint here lives only in memory — persist it to a file
 * if it must survive a process restart.
 * @param objectKey remote object name
 * @param localFile local file path
 */
export const uploadMultiparts = (objectKey, localFile) => {
  let checkpoint
  const options = {
    checkpoint,
    progress: function * (percentage, cpt) {
      checkpoint = cpt
    }
  }
  return client.multipartUpload(objectKey, localFile, options)
}
/***
 * Download an object from OSS to a local file.
 * @param objectKey remote object name
 * @param localFile local destination path
 */
export const downloadLocalFile = (objectKey, localFile) => client.get(objectKey, localFile)
// export const download_stream = (object_key, local_file) => {
// var result = yield client.getStream(object_key)
// console.log(result)
// var writeStream = fs.createWriteStream(local_file)
// result.stream.pipe(writeStream)
// }

10
src/lib/auth.js Normal file
View File

@ -0,0 +1,10 @@
import JsSHA from 'jssha'
import { SYSTEM } from '../config'
/**
 * Validate a client-supplied token: it must equal the SHA-512 hex digest of
 * the configured server secret (SYSTEM.TOKEN).
 * @param {string} token digest supplied by the client
 * @returns {boolean} true when the token matches
 */
export default (token) => {
  const sha = new JsSHA('SHA-512', 'TEXT')
  sha.update(SYSTEM.TOKEN)
  return token === sha.getHash('HEX')
}

14
src/lib/db.js Normal file
View File

@ -0,0 +1,14 @@
import low from 'lowdb'
import path from 'path'
import FileSync from 'lowdb/adapters/FileSync'
import { SYSTEM } from '../config'
// Same isDev rule as config.js: only NODE_ENV === 'development' counts as dev.
const isDev = process.env.NODE_ENV ? process.env.NODE_ENV === 'development' : false
// Dev: JSON db file in the current working directory; prod: under WORKPATH.
const adapter = isDev ? new FileSync(path.join(SYSTEM.NAME + '-distribution-db.json')) : new FileSync(path.join(SYSTEM.WORKPATH, SYSTEM.NAME + '-distribution-db.json'))
const db = low(adapter)
// Seed the two collections on first run only (appLog: releases, buildLog: builds).
if (!db.get('appLog').value()) {
  db.defaults({ appLog: [], buildLog: [] }).write()
}
export default db

41
src/lib/qiniu.js Normal file
View File

@ -0,0 +1,41 @@
import qiniu from 'qiniu'
import { QINIU } from '../config'
const mac = new qiniu.auth.digest.Mac(QINIU.accessKey, QINIU.secretKey)
const putPolicy = new qiniu.rs.PutPolicy({
  scope: QINIU.bucket
})
// NOTE(review): the upload token is minted once at module load; tokens expire,
// so a long-running process may need to refresh it — confirm against usage.
const uploadToken = putPolicy.uploadToken(mac)
const config = new qiniu.conf.Config()
// Zone (region) the bucket lives in
config.zone = qiniu.zone[QINIU.zone]
// Use the https domain
config.useHttpsDomain = true
// Whether uploads should use CDN acceleration
// config.useCdnDomain = true;
/**
 * Resumable (multipart) upload of a local file to Qiniu.
 * @param {string} key object key beginning with '/'; the leading slash is
 *   stripped for the remote key, and the public URL is QINIU.url + key
 * @param {string} localFile local file path
 * @returns {Promise<string>} resolves with the public download URL; rejects
 *   with the SDK error or the response info on a non-200 status
 */
export const uploadLocalFileToQiniu = (key, localFile) => {
  return new Promise((resolve, reject) => {
    const resumeUploader = new qiniu.resume_up.ResumeUploader(config)
    const putExtra = new qiniu.resume_up.PutExtra()
    // To resume across restarts, point this at a record file:
    // putExtra.resumeRecordFile = 'progress.log';
    // Strip the leading '/' (slice replaces the deprecated substr).
    const remoteKey = key.slice(1)
    resumeUploader.putFile(uploadToken, remoteKey, localFile, putExtra, function (respErr,
      respBody, respInfo) {
      if (respErr) {
        return reject(respErr)
      }
      if (respInfo.statusCode === 200) {
        resolve(QINIU.url + key)
      } else {
        console.log(respInfo.statusCode)
        console.log(respBody)
        return reject(respInfo)
      }
    })
  })
}

33
src/lib/swagger.js Normal file
View File

@ -0,0 +1,33 @@
import inert from 'inert'
import vision from 'vision'
import hapiSwagger from 'hapi-swagger'
import { SYSTEM } from '../config'
import pack from '../../package'
// hapi-swagger configuration: endpoints grouped by tag, scheme(s) from config.
const swaggerOptions = {
  schemes: SYSTEM.SCHEME,
  info: {
    title: 'Electron Distribution',
    version: pack.version
  },
  grouping: 'tags',
  tags: [
    {
      name: 'app',
      description: 'App Distribution'
    },
    {
      name: 'build',
      description: 'App Builds' // fixed typo: was 'APP Buils'
    }
  ]
}
// Plugins to register with the hapi server (inert/vision are swagger-ui deps).
export default [
  inert,
  vision,
  {
    plugin: hapiSwagger,
    options: swaggerOptions
  }
]

26
src/lib/tencent-cos.js Normal file
View File

@ -0,0 +1,26 @@
import COSSDK from 'cos-nodejs-sdk-v5'
import { qcloudAccessKey, COS } from '../config'
const cos = new COSSDK(qcloudAccessKey)
/**
 * Multipart (slice) upload of a local file to Tencent COS.
 * @param {string} key remote object key
 * @param {string} filePath local file path
 * @returns {Promise<Object>} COS response with `realPath` set to the public URL
 */
export const uploadToCOS = (key, filePath) => {
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: COS.bucket,
      Region: COS.region,
      Key: key,
      FilePath: filePath
    }
    cos.sliceUploadFile(params, (err, data) => {
      if (err || !data) {
        reject(err)
        return
      }
      data.realPath = COS.url + key
      resolve(data)
    })
  })
}

23
src/lib/upload.js Normal file
View File

@ -0,0 +1,23 @@
import { uploadToCOS } from './tencent-cos'
import { uploadStream as uploadLocalFileToOSS } from './ali-oss'
import { uploadLocalFileToQiniu } from './qiniu'
import { OSS as OSSConfig, SYSTEM } from '../config'
/**
 * Upload a local file to whichever object-storage backend is configured
 * (SYSTEM.OBJECT_STORAGE_TYPE: 'cos' | 'oss' | 'qiniu').
 * @param {string} key remote object key (leading slash included)
 * @param {string} localFilePath path of the file to upload
 * @returns {Promise<string|null>} public download URL, or null on failure /
 *   unknown backend
 */
export default async (key, localFilePath) => {
  switch (SYSTEM.OBJECT_STORAGE_TYPE) {
    // Braces per case keep each `const` scoped to its own clause.
    case 'cos': {
      const cos = await uploadToCOS(key, localFilePath)
      return cos.realPath
    }
    case 'oss': {
      const oss = await uploadLocalFileToOSS(key, localFilePath)
      return oss ? OSSConfig.url + key : null
    }
    case 'qiniu': {
      // Resolves directly with the public URL.
      return uploadLocalFileToQiniu(key, localFilePath)
    }
    default:
      // Unknown backend: nothing uploaded.
      return null
  }
}

26
src/routes/auth.js Normal file
View File

@ -0,0 +1,26 @@
import Joi from 'joi'
import { SYSTEM } from '../config'
import auth from '../lib/auth'
/**
 * GET /app/auth — verify an encrypted token; on success, return the
 * configured build types so the client knows which platforms are available.
 */
export default [{
  method: 'GET',
  path: `/app/auth`,
  config: {
    auth: false,
    tags: ['api', 'app'],
    description: 'Check token.',
    validate: {
      query: {
        token: Joi.string().required().description('Encrypted-Token')
      }
    }
  },
  async handler (request) {
    // Guard clause: reject bad tokens up front.
    if (!auth(request.query.token)) {
      return this.fail(null, 403, 'Token Error.')
    }
    return this.success({
      buildType: SYSTEM.BUILD_TYPE
    })
  }
}]

190
src/routes/build.js Normal file
View File

@ -0,0 +1,190 @@
import Joi from 'joi'
import fs from 'fs'
import path from 'path'
import { SYSTEM } from '../config'
import { Client } from 'ssh2'
import { spawn } from 'child_process'
import auth from '../lib/auth'
import JsSHA from 'jssha'
// Docker images used for cross-platform Electron builds.
const WIN_IMAGE_NAME = 'electronuserland/builder:wine-mono'
const LINUX_IMAGE_NAME = 'electronuserland/builder:10'
/**
 * Recursively create a directory (like `mkdir -p`); no-op when it exists.
 * @param {string} dirname directory path to create
 * @returns {boolean} true once the directory exists
 */
const mkdirsSync = (dirname) => {
  if (!fs.existsSync(dirname)) {
    // fs.mkdirSync supports recursive creation natively since Node 10.12,
    // replacing the hand-rolled parent-first recursion.
    fs.mkdirSync(dirname, { recursive: true })
  }
  return true
}
// SHA-512 hex digest of the configured server token (embedded in callback URLs).
const getHashToken = () => {
  const sha = new JsSHA('SHA-512', 'TEXT')
  sha.update(SYSTEM.TOKEN)
  return sha.getHash('HEX')
}
// GET /build/{type} — kick off a build for 'win', 'linux' or 'mac'.
// win/linux builds run inside Docker containers on this host; mac builds are
// delegated to a remote macOS machine over rsync + ssh.
export default [
  {
    method: 'GET',
    path: `/build/{type}`,
    config: {
      auth: false,
      tags: ['api', 'build'],
      description: 'App Build',
      validate: {
        params: {
          type: Joi.string().required().description('Type')
        },
        query: {
          token: Joi.string().required().description('Encrypted-Token')
        }
      }
    },
    async handler (request) {
      if (!auth(request.query.token)) {
        return this.fail(null, 403, 'Token Error.')
      }
      // NOTE(review): duplicates getHashToken() defined above — could reuse it.
      const shaObj = new JsSHA('SHA-512', 'TEXT')
      shaObj.update(SYSTEM.TOKEN)
      const hashToken = shaObj.getHash('HEX')
      // Publish to GitHub releases only when a GH token is configured.
      const publishOpt = SYSTEM.GH_TOKEN ? 'always' : 'never'
      let containerCmd = 'yarn --ignore-engines'
      let imageName = null
      let workPath = SYSTEM.WORKPATH
      let type = request.params.type
      const sourcePath = path.join(workPath, '/source')
      // NOTE(review): SYSTEM.SCHEME is an array; a single-element array
      // stringifies to its item when concatenated, so the URLs below work
      // only while SCHEME has exactly one entry — confirm intentional.
      // The container command builds the app, then curls back to /app/upload
      // with $LOG_PATH expanded by the container's shell.
      switch (type) {
        case 'win':
          workPath += '/win'
          imageName = WIN_IMAGE_NAME
          containerCmd += ' && yarn run build --' + type + ' --publish ' + publishOpt + ' && curl -X GET "' + SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/upload?platform=' + type + '&extended=x86-64&token=' + hashToken + '&logPath=$LOG_PATH" -H "cache-control: no-cache"'
          break
        case 'linux':
          workPath += '/linux'
          imageName = LINUX_IMAGE_NAME
          containerCmd += ' && yarn run build --' + type + ' --publish ' + publishOpt + ' && curl -X GET "' + SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/upload?platform=' + type + '&extended=x86-64&token=' + hashToken + '&logPath=$LOG_PATH" -H "cache-control: no-cache"'
          break
      }
      // One timestamped log file per build, under WORKPATH/logs/<type>/.
      if (!fs.existsSync(SYSTEM.WORKPATH + '/logs/' + type)) mkdirsSync(SYSTEM.WORKPATH + '/logs/' + type)
      const LogPath = SYSTEM.WORKPATH + '/logs/' + type + '/' + (new Date()).getTime() + '.log'
      if (type === 'mac') {
        // 1. rsync server -> mac
        const writerStream = fs.createWriteStream(LogPath, {flags: 'a'})
        const rsync = spawn('/usr/bin/rsync', ['-avrz', '-e', `'/usr/bin/ssh -p ${SYSTEM.MAC_SERVER_PORT}'`, '--delete-after', '--exclude', '"node_modules"', sourcePath + '/', SYSTEM.MAC_SERVER_USERNAME + '@' + SYSTEM.MAC_SERVER_HOST + ':/tmp/' + SYSTEM.NAME])
        rsync.stdout.pipe(writerStream)
        rsync.stderr.pipe(writerStream)
        rsync.on('close', (code) => {
          const writerStream = fs.createWriteStream(LogPath, {flags: 'a'})
          writerStream.write(`\nChild process exited with code ${code} \n`)
          // 2. build app and rsync mac build dir -> server build dir
          // Assemble the shell script that runs on the mac host.
          let bashContent = ``
          if (SYSTEM.CSC_LINK) bashContent += 'export CSC_LINK=' + SYSTEM.CSC_LINK + '\n'
          if (SYSTEM.CSC_KEY_PASSWORD) bashContent += 'export CSC_KEY_PASSWORD=' + SYSTEM.CSC_KEY_PASSWORD + '\n'
          if (SYSTEM.GH_TOKEN) bashContent += 'export GH_TOKEN=' + SYSTEM.GH_TOKEN + '\n'
          bashContent += 'export LOG_PATH=' + LogPath + '\n'
          bashContent += 'cd /tmp/' + SYSTEM.NAME + '\n'
          bashContent += `yarn --ignore-engines` + ' && yarn run build --' + type + ' --publish ' + publishOpt + '\n'
          // bashContent += `echo -e "Host ${SYSTEM.LINUX_SERVER_HOST}\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config\n`
          bashContent += `rsync -avrz -e 'ssh -p ${SYSTEM.LINUX_SERVER_PORT}' --exclude "node_modules" /tmp/` + SYSTEM.NAME + '/build/ ' + SYSTEM.LINUX_SERVER_USERNAME + '@' + SYSTEM.LINUX_SERVER_HOST + ':' + sourcePath + '/build \n'
          bashContent += 'curl -X GET "' + SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/upload?platform=' + type + '&extended=x86-64&token=' + getHashToken() + '&logPath=' + LogPath + '" -H "cache-control: no-cache"\n'
          writerStream.write('Run command: \n')
          writerStream.write(bashContent)
          // 3. Run the script on the mac host over an interactive ssh shell.
          const conn = new Client()
          conn.on('ready', function () {
            const writerStream = fs.createWriteStream(LogPath, {flags: 'a'})
            writerStream.write('Client :: ready\n')
            conn.shell(function (err, stream) {
              if (err) throw err
              stream.pipe(writerStream)
              stream.on('close', function () {
                const writerStream = fs.createWriteStream(LogPath, {flags: 'a'})
                writerStream.write('Stream :: close')
                conn.end()
              })
              // Feeding the script via stdin then closing it.
              stream.end(bashContent)
            })
          }).connect({
            host: SYSTEM.MAC_SERVER_HOST,
            port: Number(SYSTEM.MAC_SERVER_PORT),
            username: SYSTEM.MAC_SERVER_USERNAME,
            privateKey: require('fs').readFileSync('/root/.ssh/id_rsa')
          })
        })
        // NOTE(review): the mac branch returns no response body — confirm how
        // hapi handles the undefined return for this route.
      } else {
        // Environment for the build container; LOG_PATH is read back by the
        // curl callback embedded in containerCmd.
        const Env = [
          'LOG_PATH=' + LogPath
        ]
        if (SYSTEM.CSC_LINK) Env.push('CSC_LINK=' + SYSTEM.CSC_LINK)
        if (SYSTEM.CSC_KEY_PASSWORD) Env.push('CSC_KEY_PASSWORD=' + SYSTEM.CSC_KEY_PASSWORD)
        if (SYSTEM.GH_TOKEN) Env.push('GH_TOKEN=' + SYSTEM.GH_TOKEN)
        const optsc = {
          'AttachStdin': true,
          'AttachStdout': true,
          'AttachStderr': true,
          'Tty': true,
          'OpenStdin': true,
          'StdinOnce': false,
          'Env': Env,
          'Cmd': ['/bin/bash', '-c', containerCmd],
          'Image': imageName,
          'WorkingDir': '/project',
          'Volumes': {},
          'VolumesFrom': [],
          'HostConfig': {
            Binds: [
              workPath + ':/project:rw',
              '/etc/localtime:/etc/localtime:ro',
              workPath + '/.cache/electron:/root/.cache/electron',
              workPath + '/.cache/electron-builder:/root/.cache/electron-builder'
            ],
            // NOTE(review): BUILD_CPU_LIMIT / BUILD_MEMORY_LIMIT are not
            // declared in config.js — presumably read straight from the
            // environment; confirm.
            CpusetCpus: SYSTEM.BUILD_CPU_LIMIT || '0',
            Memory: Number(SYSTEM.BUILD_MEMORY_LIMIT) || 0,
            AutoRemove: true
          }
        }
        // Create, attach (logs piped to LogPath) and start the container;
        // resolves with the container id.
        const runDocker = () => {
          return new Promise((resolve, reject) => {
            this.docker.createContainer(optsc, (err, container) => {
              if (err || !container) return reject(err || 'container is null')
              container.attach({stream: true, stdout: true, stderr: true}, (err, stream) => {
                if (err) return reject(err)
                const writerStream = fs.createWriteStream(LogPath)
                stream.pipe(writerStream)
              })
              container.start((err, data) => {
                if (err) return reject(err)
                console.log(data)
                resolve(container.id)
              })
            })
          })
        }
        try {
          const res = await runDocker()
          return this.success(res)
        } catch (err) {
          console.log(err)
          return this.fail(null, 10001, err.toString())
        }
      }
    }
  }
]

22
src/routes/encrypt.js Normal file
View File

@ -0,0 +1,22 @@
import Joi from 'joi'
import JsSHA from 'jssha'
/**
 * GET /app/encrypt — return the SHA-512 hex digest of the supplied token.
 * Clients use this to produce the Encrypted-Token expected elsewhere.
 */
export default [{
  method: 'GET',
  path: `/app/encrypt`,
  config: {
    auth: false,
    tags: ['api', 'app'],
    description: 'Token encrypt service.',
    validate: {
      query: {
        token: Joi.string().required().description('Token')
      }
    }
  },
  async handler (request) {
    const sha = new JsSHA('SHA-512', 'TEXT')
    sha.update(request.query.token)
    const digest = sha.getHash('HEX')
    return this.success(digest)
  }
}]

12
src/routes/icon.ico.js Normal file
View File

@ -0,0 +1,12 @@
/**
 * GET /app/icon.ico — static icon used by Squirrel.Windows installers.
 */
export default [{
  method: 'GET',
  path: `/app/icon.ico`,
  config: {
    auth: false,
    tags: ['api', 'app'],
    description: 'Squirrel windows icon.'
  },
  handler (request, h) {
    // Served by the inert plugin from the public directory.
    return h.file('public/icon.ico')
  }
}]

19
src/routes/index.js Normal file
View File

@ -0,0 +1,19 @@
import fs from 'fs'
import path from 'path'
// Auto-load every sibling route module (except this index) and flatten all of
// their exported route definitions into a single array for server.route().
const modules = []
fs.readdirSync(__dirname)
  .filter((file) => file.match(/\.(json|js)$/))
  .forEach((file) => {
    if (file === 'index.js') return
    const content = require(path.join(__dirname, file)).default
    if (Array.isArray(content)) {
      modules.push(...content)
    } else {
      modules.push(content)
    }
  })
export default modules

54
src/routes/list.js Normal file
View File

@ -0,0 +1,54 @@
import Joi from 'joi'
import { SYSTEM } from '../config'
import auth from '../lib/auth'
/**
 * Listing endpoints; both require a valid Encrypted-Token and return entries
 * sorted newest-first (dates are stored as epoch-millisecond strings).
 */
export default [{
  method: 'GET',
  path: `/app/list/release`,
  config: {
    auth: false,
    tags: ['api', 'app'],
    description: 'App release log list.',
    validate: {
      query: {
        token: Joi.string().required().description('Encrypted-Token')
      }
    }
  },
  async handler (request) {
    if (!auth(request.query.token)) {
      return this.fail(null, 403, 'Token Error.')
    }
    const list = this.$db.get('appLog') // .filter(o => o.type !== 'maczip')
      .sortBy((item) => -item.releaseDate)
      .value()
    return this.success({ name: SYSTEM.NAME, list })
  }
},
{
  method: 'GET',
  path: `/app/list/build`,
  config: {
    auth: false,
    tags: ['api', 'app'],
    description: 'App build log list.',
    validate: {
      query: {
        token: Joi.string().required().description('Encrypted-Token')
      }
    }
  },
  async handler (request) {
    if (!auth(request.query.token)) {
      return this.fail(null, 403, 'Token Error.')
    }
    const list = this.$db.get('buildLog')
      .sortBy((item) => -item.startDate)
      .value()
    return this.success({ name: SYSTEM.NAME, list })
  }
}]

39
src/routes/log.log.js Normal file
View File

@ -0,0 +1,39 @@
import fs from 'fs'
import Joi from 'joi'
/**
 * GET /build/log.log — find the newest appLog entry whose logPath matches the
 * query and return the contents of that log file.
 */
export default [
  {
    method: 'GET',
    path: `/build/log.log`,
    config: {
      auth: false,
      tags: ['api', 'build'],
      description: 'Get build log.',
      validate: {
        query: {
          path: Joi.string().required().description('Log path.')
        }
      }
    },
    async handler (request) {
      const logPath = decodeURI(request.query.path)
      // Newest matching entry (releaseDate is an epoch-millis string).
      const entry = this.$db.get('appLog')
        .filter({ logPath })
        .sortBy((item) => -item.releaseDate)
        .take()
        .first()
        .value()
      if (!(entry && entry.logPath)) {
        return this.fail(null, 10001, 'No file.')
      }
      try {
        return fs.readFileSync(entry.logPath)
      } catch (err) {
        return this.fail(err)
      }
    }
  }
]

78
src/routes/nupkg.js Normal file
View File

@ -0,0 +1,78 @@
import Joi from 'joi'
import path from 'path'
import { SYSTEM, COS, OSS, QINIU } from '../config'
import axios from 'axios'
// GET /app/nupkg/{version}/{releases} — Squirrel.Windows update feed: serves
// the RELEASES manifest (proxied from object storage) or redirects to the
// .nupkg download itself.
export default [
  {
    method: 'GET',
    path: `/app/nupkg/{version}/{releases}`,
    config: {
      auth: false,
      tags: ['api', 'app'],
      description: 'RELEASES file or download pukge.',
      validate: {
        params: {
          version: Joi.string().required().description('Version'),
          releases: Joi.string().required().description('RELEASES file or File name')
        }
      }
    },
    async handler (request, h) {
      const version = request.params.version
      const releases = request.params.releases
      // Base URL of whichever object-storage backend is configured.
      let objectStorageUrl = ''
      switch (SYSTEM.OBJECT_STORAGE_TYPE) {
        case 'cos':
          objectStorageUrl = COS.url
          break
        case 'oss':
          objectStorageUrl = OSS.url
          break
        case 'qiniu':
          objectStorageUrl = QINIU.url
          break
      }
      if (releases === 'RELEASES' || releases === 'releases') {
        // TODO: ?id=name&localVersion=4.7.2&arch=amd64
        // Newest RELEASES manifest recorded for this version.
        const nupkgLast = this.$db.get('appLog')
          .filter({type: 'RELEASES', version})
          .sortBy((item) => -item.releaseDate)
          .take()
          .first()
          .value()
        if (nupkgLast) {
          const RELEASESname = path.join('RELEASES-' + version, 'RELEASES')
          const url = objectStorageUrl + '/app/' + nupkgLast.name + '/' + version + '/' + RELEASESname
          // Proxy the manifest contents rather than redirecting, so Squirrel
          // reads it from this origin.
          try {
            const { data } = await axios.get(url)
            return data
          } catch (err) {
            console.log(err)
            return ''
          }
        }
      } else {
        const fileName = releases
        const nupkgLast = this.$db.get('appLog')
          .filter({type: 'nupkg', version})
          .sortBy((item) => -item.releaseDate)
          .take()
          .first()
          .value()
        if (nupkgLast) {
          // Package download: redirect straight to object storage.
          return h.redirect(objectStorageUrl + '/app/' + nupkgLast.name + '/' + version + '/' + fileName)
        } else {
          return ''
        }
      }
      // Reached only when a RELEASES request has no recorded manifest.
      return this.fail()
    }
  }
]

11
src/routes/public.js Normal file
View File

@ -0,0 +1,11 @@
// Catch-all static-file route: serves everything under ./public
// (directory handler provided by the inert plugin).
export default [
  {
    method: 'GET',
    path: '/{param*}',
    handler: {
      directory: {
        path: 'public'
      }
    }
  }
]

40
src/routes/release.js Normal file
View File

@ -0,0 +1,40 @@
import Joi from 'joi'
import auth from '../lib/auth'
/**
 * POST /app/release — record a released artifact in the appLog collection.
 * The payload is stored as-is, stamped with a millisecond-epoch releaseDate.
 */
export default [
  {
    method: 'POST',
    path: `/app/release`,
    config: {
      auth: false,
      tags: ['api', 'app'],
      description: 'The app release.',
      validate: {
        payload: {
          token: Joi.string().required().description('Encrypted-Token'),
          name: Joi.string().required().description('The package.json name'),
          downloadUrl: Joi.string().required().description('Download URL'),
          version: Joi.string().required().description('APP version'),
          platform: Joi.string().required().description('Platform'),
          extended: Joi.string().required().description('Extended'),
          type: Joi.string().required().description('Type'),
          logPath: Joi.string().required().description('Log path'),
          author: Joi.string().description('Author'),
          authorEmail: Joi.string().description('Author Email'),
          message: Joi.string().description('Message')
        }
      }
    },
    async handler (request) {
      if (!auth(request.payload.token)) {
        return this.fail(null, 403, 'Token Error.')
      }
      const entry = request.payload
      entry.releaseDate = new Date().getTime().toString()
      const written = this.$db.get('appLog').push(entry).write()
      return this.success(written)
    }
  }
]

View File

@ -0,0 +1,26 @@
/**
 * GET /app/release.json — metadata for the latest mac zip, in the JSON shape
 * consumed by Squirrel.Mac-style updaters. Returns {} when nothing is recorded.
 */
export default [
  {
    method: 'GET',
    path: `/app/release.json`,
    config: {
      auth: false,
      tags: ['api', 'app'],
      description: 'Get new JSON about MAC update.'
    },
    async handler () {
      // Newest 'maczip' entry (releaseDate is an epoch-millis string).
      const latest = this.$db.get('appLog')
        .filter({type: 'maczip'})
        .sortBy((item) => -item.releaseDate)
        .take()
        .first()
        .value()
      if (!latest) {
        return {}
      }
      return {
        'url': latest.downloadUrl,
        'name': latest.name,
        'notes': latest.message,
        'pub_date': this.$moment(latest.releaseDate).tz('Asia/Shanghai').format()
      }
    }
  }
]

View File

@ -0,0 +1,62 @@
import { SYSTEM } from '../config'
/**
 * GET /app/updates.json — update-check manifest covering all three platforms.
 * Each platform entry is built from the newest 'install' release recorded in
 * appLog; platforms with no release yet map to an empty object.
 */
export default [{
  method: 'GET',
  path: `/app/updates.json`,
  config: {
    auth: false,
    tags: ['api', 'app'],
    description: 'Update check JSON.'
  },
  async handler () {
    // Newest 'install' entry for a platform, or undefined when none exist
    // (was triplicated inline; extracted to a single helper).
    const latestInstall = (platform) => this.$db.get('appLog')
      .filter({
        platform,
        type: 'install'
      })
      .sortBy((item) => -item.releaseDate)
      .take()
      .first()
      .value()
    const macLast = latestInstall('mac')
    const winLast = latestInstall('win')
    const linuxLast = latestInstall('linux')
    // NOTE(review): SYSTEM.SCHEME is an array that stringifies to its single
    // element in these concatenations — confirm before adding schemes.
    return {
      'win32-x64-prod': winLast ? {
        'readme': winLast.name,
        'update': SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/nupkg/' + winLast.version,
        'install': winLast.downloadUrl,
        'version': winLast.version
      } : {},
      'darwin-x64-prod': macLast ? {
        'readme': macLast.name,
        'update': SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/release.json',
        'install': macLast.downloadUrl,
        'version': macLast.version
      } : {},
      'linux-x64-prod': linuxLast ? {
        'update': linuxLast.downloadUrl,
        'install': linuxLast.downloadUrl,
        'version': linuxLast.version
      } : {}
    }
  }
}]

259
src/routes/upload.js Normal file
View File

@ -0,0 +1,259 @@
import fs from 'fs'
import Joi from 'joi'
import git from 'simple-git'
import path from 'path'
import uploadToObjectStorage from '../lib/upload'
import auth from '../lib/auth'
import {
SYSTEM
} from '../config'
// Disable host-key checking for git-over-ssh when reading the source repo log.
const GIT_SSH_COMMAND = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
// GET /app/upload — callback hit by the build jobs once artifacts exist:
// pushes each produced file to object storage and records it in appLog,
// updating the matching buildLog entry's status along the way.
export default [{
  method: 'GET',
  path: `/app/upload`,
  config: {
    auth: false,
    tags: ['api', 'app'],
    description: 'Upload app to object storage.',
    validate: {
      query: {
        platform: Joi.string().required().description('System platform'),
        extended: Joi.string().default('x86-64').description('System extended'),
        // NOTE(review): startDate is required here, but the curl callbacks
        // assembled in build.js do not include it — confirm the callers.
        startDate: Joi.string().required().description('Build startDate'),
        logPath: Joi.string().required().description('Log file path.'),
        token: Joi.string().required().description('Encrypted-Token')
      }
    }
  },
  async handler (request) {
    if (!auth(request.query.token)) {
      return this.fail(null, 403, 'Token Error.')
    }
    const platform = request.query.platform
    const extended = request.query.extended
    const logPath = request.query.logPath
    const startDate = request.query.startDate
    const workPath = SYSTEM.WORKPATH
    const sourcePath = path.join(workPath, '/source')
    // Promisified `git log -1` on the checked-out source tree; used to stamp
    // the release with author/message of the built commit.
    const gitLog = (workPath) => {
      return new Promise((resolve, reject) => {
        git(workPath).env({
          ...process.env,
          GIT_SSH_COMMAND
        }).log({
          n: 1
        }, (err, status) => {
          if (err) {
            return reject(err)
          }
          resolve(status)
        })
      })
    }
    try {
      const log = await gitLog(sourcePath)
      if (log && log.all && log.all.length === 1) {
        const gitInfo = log.all[0]
        // Artifact locations and names differ per platform; the switch below
        // fills in whichever of these apply.
        let packageJson = null
        let linuxPath = null
        let winPath = null
        let filePath = null
        let filePath2 = null
        let filename = null
        let filename2 = null
        let nupkg = null
        let RELEASES = null
        let RELEASESname = null
        let nupkgname = null
        switch (platform) {
          case 'mac':
            // dmg installer plus a zip for Squirrel.Mac-style updates.
            packageJson = JSON.parse(fs.readFileSync(path.join(sourcePath, 'package.json'), 'utf-8'))
            filePath = path.join(sourcePath, 'build', packageJson.build.productName + '-' + packageJson.version + '.dmg')
            filePath2 = path.join(sourcePath, 'build', packageJson.build.productName + '-' + packageJson.version + '-mac.zip')
            filename = packageJson.name + '-' + packageJson.version + '.dmg'
            filename2 = packageJson.name + '-' + packageJson.version + '-mac.zip'
            break
          case 'linux':
            // AppImage; filename depends on the architecture extension.
            linuxPath = path.join(workPath, '/linux')
            packageJson = JSON.parse(fs.readFileSync(path.join(linuxPath, 'package.json'), 'utf-8'))
            if (extended === 'armv7l') {
              filePath = path.join(linuxPath, 'build', packageJson.name + '-' + packageJson.version + '-armv7l.AppImage')
              filename = packageJson.name + '-' + packageJson.version + '-armv7l.AppImage'
            } else {
              filePath = path.join(linuxPath, 'build', packageJson.name + '-' + packageJson.version + '-x86_64.AppImage')
              filename = packageJson.name + '-' + packageJson.version + '-x86_64.AppImage'
            }
            break
          case 'win':
            // Squirrel.Windows produces a Setup exe, a full .nupkg and a
            // RELEASES manifest — all three get uploaded.
            winPath = path.join(workPath, '/win')
            packageJson = JSON.parse(fs.readFileSync(path.join(winPath, 'package.json'), 'utf-8'))
            filePath = path.join(winPath, 'build', 'squirrel-windows', packageJson.build.productName + ' Setup ' + packageJson.version + '.exe')
            filename = packageJson.name + '-' + packageJson.version + '.exe'
            nupkg = path.join(winPath, 'build', 'squirrel-windows', packageJson.name + '-' + packageJson.version + '-full.nupkg')
            RELEASES = path.join(winPath, 'build', 'squirrel-windows', 'RELEASES')
            RELEASESname = path.join('RELEASES-' + packageJson.version, 'RELEASES')
            nupkgname = packageJson.name + '-' + packageJson.version + '-full.nupkg'
            break
        }
        // Mark the build as uploading while transfers are in flight.
        this.$db.get('buildLog').find({
          startDate
        }).assign({
          status: 'uploading'
        }).write()
        const fileFullPath = path.join('/app/', packageJson.name, packageJson.version, filename)
        // NOTE(review): each upload below is wrapped in try/catch that calls
        // the same upload() again from catch — since the upload itself is an
        // async fire-and-forget promise (errors surface in .catch, not here),
        // the catch branch looks unreachable in practice; confirm intent.
        try {
          // Main installer artifact; linux finishes the buildLog entry here
          // (mac/win finish after their extra artifacts upload).
          var upload = () => {
            uploadToObjectStorage(fileFullPath, filePath).then(res => {
              this.$db.get('appLog').push({
                name: packageJson.name,
                downloadUrl: res,
                version: packageJson.version,
                platform,
                extended,
                action: 'release',
                type: 'install',
                logPath,
                author: gitInfo.author_name,
                authorEmail: gitInfo.author_email,
                message: gitInfo.message,
                releaseDate: new Date().getTime().toString()
              }).write()
              if (platform === 'linux') {
                this.$db.get('buildLog').find({
                  startDate
                }).assign({
                  status: 'finish'
                }).write()
              }
            }).catch(err => {
              console.log(err)
            })
          }
          upload()
        } catch (err) {
          console.log(err)
          upload()
        }
        if (filePath2) {
          // mac only: the -mac.zip companion artifact.
          const fileFullPath2 = path.join('/app/', packageJson.name, packageJson.version, filename2)
          try {
            var upload2 = () => {
              uploadToObjectStorage(fileFullPath2, filePath2).then(res => {
                this.$db.get('appLog').push({
                  name: packageJson.name,
                  downloadUrl: res,
                  version: packageJson.version,
                  platform,
                  extended,
                  action: 'release',
                  type: 'maczip',
                  logPath,
                  author: gitInfo.author_name,
                  authorEmail: gitInfo.author_email,
                  message: gitInfo.message,
                  releaseDate: new Date().getTime().toString()
                }).write()
                this.$db.get('buildLog').find({
                  startDate
                }).assign({
                  status: 'finish'
                }).write()
              }).catch(err => {
                console.log(err)
              })
            }
            upload2()
          } catch (err) {
            console.log(err)
            upload2()
          }
        }
        if (nupkg) {
          // win only: full .nupkg plus the RELEASES manifest.
          const fileFullPath3 = path.join('/app/', packageJson.name, packageJson.version, nupkgname)
          try {
            var upload3 = () => {
              uploadToObjectStorage(fileFullPath3, nupkg).then(res => {
                this.$db.get('appLog').push({
                  name: packageJson.name,
                  downloadUrl: res,
                  version: packageJson.version,
                  platform,
                  extended,
                  action: 'release',
                  type: 'nupkg',
                  logPath,
                  author: gitInfo.author_name,
                  authorEmail: gitInfo.author_email,
                  message: gitInfo.message,
                  releaseDate: new Date().getTime().toString()
                }).write()
                this.$db.get('buildLog').find({
                  startDate
                }).assign({
                  status: 'finish'
                }).write()
              }).catch(err => {
                console.log(err)
              })
            }
            upload3()
          } catch (err) {
            console.log(err)
            upload3()
          }
          const fileFullPath4 = path.join('/app/', packageJson.name, packageJson.version, RELEASESname)
          try {
            var upload4 = () => {
              uploadToObjectStorage(fileFullPath4, RELEASES).then(res => {
                this.$db.get('appLog').push({
                  name: packageJson.name,
                  downloadUrl: res,
                  version: packageJson.version,
                  platform,
                  extended,
                  action: 'release',
                  type: 'RELEASES',
                  logPath,
                  author: gitInfo.author_name,
                  authorEmail: gitInfo.author_email,
                  message: gitInfo.message,
                  releaseDate: new Date().getTime().toString()
                }).write()
              }).catch(err => {
                console.log(err)
              })
            }
            upload4()
          } catch (err) {
            console.log(err)
            upload4()
          }
        }
        // Responds immediately; uploads continue in the background.
        return this.success('ok')
      } else {
        return this.fail(null, 10003, 'Get git log content error.')
      }
    } catch (err) {
      console.log(err)
      return this.fail(err.toString(), 10001, 'Get git log error.')
    }
  }
}]

363
src/routes/webhooks.js Normal file
View File

@ -0,0 +1,363 @@
import fs from 'fs'
import path from 'path'
import git from 'simple-git'
import { SYSTEM } from '../config'
import { Client } from 'ssh2'
import { spawn } from 'child_process'
import JsSHA from 'jssha'
import Joi from 'joi'
import db from '../lib/db'
// Checkout locations: one working copy per build target so concurrent
// platform builds never share (or clobber) a node_modules / build tree.
const repoPath = SYSTEM.REPOPATH
const workPath = SYSTEM.WORKPATH
const sourcePath = path.join(workPath, '/source') // also the copy macBuild rsyncs to the Mac
const linuxPath = path.join(workPath, '/linux')
const winPath = path.join(workPath, '/win')
// Docker images used by electron-builder for cross-platform builds.
const WIN_IMAGE_NAME = 'electronuserland/builder:wine-mono'
const LINUX_IMAGE_NAME = 'electronuserland/builder:10'
// NOTE(review): disables SSH host-key verification for git operations —
// convenient for unattended CI, but permits MITM; confirm this is acceptable.
const GIT_SSH_COMMAND = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
// SHA-512 hex digest of the shared secret; used to authenticate the
// build containers' callback requests to /app/upload.
const getHashToken = () => {
  const hasher = new JsSHA('SHA-512', 'TEXT')
  hasher.update(SYSTEM.TOKEN)
  return hasher.getHash('HEX')
}
// Refresh the per-platform working copies for every platform listed in
// `buidType` ('mac' | 'linux' | 'win' substrings): clone when the directory is
// missing or not yet a git checkout, pull otherwise. All platforms sync in
// parallel; resolves true only when every sync succeeded.
const gitCodeUpdate = async (buidType) => {
  const gitClone = (repoPath, targetPath) => {
    return new Promise((resolve, reject) => {
      git().env({
        ...process.env,
        GIT_SSH_COMMAND
      })
        .clone(repoPath, targetPath, (err) => {
          if (err) {
            reject(err)
            return
          }
          resolve({
            code: 1,
            type: 'clone',
            change: true
          })
        })
    })
  }
  const gitPull = (targetPath) => {
    return new Promise((resolve, reject) => {
      git(targetPath).env({
        ...process.env,
        GIT_SSH_COMMAND
      })
        .pull((err, update) => {
          if (err) {
            reject(err)
            return
          }
          // `summary.changes` is non-zero when the pull brought in commits.
          if (update && update.summary.changes) {
            resolve({
              code: 1,
              type: 'pull',
              change: true
            })
          } else {
            // BUG FIX: original reported type 'clone' here; a no-op pull is
            // still a pull. (Only `code` is inspected by the caller today.)
            resolve({
              code: 1,
              type: 'pull',
              change: false
            })
          }
        })
    })
  }
  // Clone when missing / not a checkout, otherwise pull — shared by all targets.
  const syncRepo = (targetPath) => {
    if (!fs.existsSync(targetPath)) {
      mkdirsSync(targetPath)
      return gitClone(repoPath, targetPath)
    }
    if (fs.readdirSync(targetPath).includes('.git')) {
      return gitPull(targetPath)
    }
    return gitClone(repoPath, targetPath)
  }
  const promiseList = []
  if (buidType.includes('mac')) promiseList.push(syncRepo(sourcePath))
  if (buidType.includes('linux')) promiseList.push(syncRepo(linuxPath))
  if (buidType.includes('win')) promiseList.push(syncRepo(winPath))
  const res = await Promise.all(promiseList)
  return res.every(a => a.code === 1)
}
// Create `dirname` and any missing parent directories (mkdir -p semantics).
// Returns true once the directory exists. Replaces the hand-rolled
// parent-first recursion with Node's native recursive mkdir (Node >= 10.12),
// which also fixes the undefined return on some recursion paths.
const mkdirsSync = (dirname) => {
  if (!fs.existsSync(dirname)) {
    fs.mkdirSync(dirname, { recursive: true })
  }
  return true
}
// Build the app for `type` ('win' | 'linux') inside a throwaway Docker
// container: install deps, run electron-builder, then curl this server's
// /app/upload callback. Registers a buildLog entry, streams the container's
// output to a fresh log file, and resolves with the started container's id.
// Rejects if the container cannot be created or started.
const startDockerToBuild = async (name, version, type, docker) => {
  // Only publish artifacts (electron-builder --publish) when a GitHub token is set.
  const publishOpt = SYSTEM.GH_TOKEN ? 'always' : 'never'
  // Millisecond-epoch string; the upload callback passes it back so the
  // matching buildLog entry can be updated.
  const startDate = new Date().getTime().toString()
  let containerCmd = 'yarn --ignore-engines'
  let imageName = null
  let workPath = SYSTEM.WORKPATH
  switch (type) {
    case 'win':
      workPath += '/win'
      imageName = WIN_IMAGE_NAME
      // On success the container calls back into /app/upload, authenticated
      // with the hashed token; $LOG_PATH is expanded by bash inside the container.
      containerCmd += ' && yarn run build --' + type + ' --publish ' + publishOpt + ' && curl -X GET "' + SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/upload?platform=' + type + '&extended=x86-64&token=' + getHashToken() + '&startDate=' + startDate + '&logPath=$LOG_PATH" -H "cache-control: no-cache"'
      break
    case 'linux':
      workPath += '/linux'
      imageName = LINUX_IMAGE_NAME
      containerCmd += ' && yarn run build --' + type + ' --publish ' + publishOpt + ' && curl -X GET "' + SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/upload?platform=' + type + '&extended=x86-64&token=' + getHashToken() + '&startDate=' + startDate + '&logPath=$LOG_PATH" -H "cache-control: no-cache"'
      break
  }
  // One timestamp-named log file per run, under <workpath>/logs/<type>/.
  if (!fs.existsSync(SYSTEM.WORKPATH + '/logs/' + type)) mkdirsSync(SYSTEM.WORKPATH + '/logs/' + type)
  const logPath = SYSTEM.WORKPATH + '/logs/' + type + '/' + (new Date()).getTime() + '.log'
  // NOTE(review): 'buiding' (sic) — the same spelling is used by macBuild;
  // confirm no consumer keys on this exact string before correcting it.
  db.get('buildLog').push({
    name,
    version,
    platform: type,
    extended: 'x86-64',
    action: 'build',
    status: 'buiding',
    logPath,
    startDate
  }).write()
  const Env = [
    'LOG_PATH=' + logPath
  ]
  // Optional code-signing / publishing secrets are forwarded into the container.
  if (SYSTEM.CSC_LINK) Env.push('CSC_LINK=' + SYSTEM.CSC_LINK)
  if (SYSTEM.CSC_KEY_PASSWORD) Env.push('CSC_KEY_PASSWORD=' + SYSTEM.CSC_KEY_PASSWORD)
  if (SYSTEM.GH_TOKEN) Env.push('GH_TOKEN=' + SYSTEM.GH_TOKEN)
  const optsc = {
    'AttachStdin': true,
    'AttachStdout': true,
    'AttachStderr': true,
    'Tty': true,
    'OpenStdin': true,
    'StdinOnce': false,
    'Env': Env,
    'Cmd': ['/bin/bash', '-c', containerCmd],
    'Image': imageName,
    'WorkingDir': '/project',
    'Volumes': {},
    'VolumesFrom': [],
    'HostConfig': {
      // Mount the checkout as /project plus shared electron caches so
      // repeated builds skip re-downloading electron binaries.
      Binds: [
        workPath + ':/project:rw',
        '/etc/localtime:/etc/localtime:ro',
        workPath + '/.cache/electron:/root/.cache/electron',
        workPath + '/.cache/electron-builder:/root/.cache/electron-builder'
      ],
      CpusetCpus: SYSTEM.BUILD_CPU_LIMIT || '0',
      Memory: Number(SYSTEM.BUILD_MEMORY_LIMIT) || 0,
      // Container removes itself when the build command exits.
      AutoRemove: true
    }
  }
  const runDocker = (docker) => {
    return new Promise((resolve, reject) => {
      docker.createContainer(optsc, (err, container) => {
        if (err || !container) return reject(err || 'container is null')
        // Pipe the container's combined stdout/stderr into the log file.
        container.attach({stream: true, stdout: true, stderr: true}, (err, stream) => {
          const writerStream = fs.createWriteStream(logPath)
          if (err) return writerStream.write(err.toString())
          stream.pipe(writerStream)
        })
        container.start((err, data) => {
          if (err) return reject(err)
          console.log(data)
          resolve(container.id)
        })
      })
    })
  }
  return runDocker(docker)
}
// Build the macOS app on a remote Mac over SSH (mac builds can't run in Docker):
//   1. rsync the source checkout from this server to the Mac,
//   2. run the build there through an SSH shell,
//   3. rsync the artifacts back and curl this server's /app/upload callback.
// Fire-and-forget: progress and errors are appended to a per-run log file.
const macBuild = async (name, version) => {
  const type = 'mac'
  const publishOpt = SYSTEM.GH_TOKEN ? 'always' : 'never'
  if (!fs.existsSync(SYSTEM.WORKPATH + '/logs/' + type)) mkdirsSync(SYSTEM.WORKPATH + '/logs/' + type)
  const logPath = SYSTEM.WORKPATH + '/logs/' + type + '/' + (new Date()).getTime() + '.log'
  const startDate = new Date().getTime().toString()
  // NOTE(review): 'buiding' (sic) — same spelling as startDockerToBuild; keep consistent.
  db.get('buildLog').push({
    name,
    version,
    platform: type,
    extended: 'x86-64',
    action: 'build',
    status: 'buiding',
    logPath: logPath,
    startDate
  }).write()
  // 1. rsync server -> mac
  const writerStream = fs.createWriteStream(logPath, {flags: 'a'})
  const cmd = `rsync -avrz -e 'ssh -p ${SYSTEM.MAC_SERVER_PORT}' --delete-after --exclude "node_modules" ${sourcePath}/ ${SYSTEM.MAC_SERVER_USERNAME}@${SYSTEM.MAC_SERVER_HOST}:/tmp/${SYSTEM.NAME}`
  writerStream.write(cmd)
  const rsync = spawn('/bin/sh', ['-c', cmd])
  rsync.stdout.pipe(writerStream)
  rsync.stderr.pipe(writerStream)
  rsync.on('close', (code) => {
    const writerStream = fs.createWriteStream(logPath, {flags: 'a'})
    writerStream.write(`\nChild process exited with code ${code} \n`)
    // 2. build app and rsync mac build dir -> server build dir
    // Shell script run on the Mac: export signing/publish secrets, build, push
    // artifacts back to this server, then hit the authenticated upload endpoint.
    let bashContent = ''
    if (SYSTEM.CSC_LINK) bashContent += 'export CSC_LINK=' + SYSTEM.CSC_LINK + '\n'
    if (SYSTEM.CSC_KEY_PASSWORD) bashContent += 'export CSC_KEY_PASSWORD=' + SYSTEM.CSC_KEY_PASSWORD + '\n'
    if (SYSTEM.GH_TOKEN) bashContent += 'export GH_TOKEN=' + SYSTEM.GH_TOKEN + '\n'
    bashContent += 'export LOG_PATH=' + logPath + '\n'
    bashContent += 'cd /tmp/' + SYSTEM.NAME + '\n'
    bashContent += `yarn --ignore-engines` + ' && yarn run build --' + type + ' --publish ' + publishOpt + '\n'
    // bashContent += `echo -e "Host ${SYSTEM.LINUX_SERVER_HOST}\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config\n`
    bashContent += `rsync -avrz -e 'ssh -p ${SYSTEM.LINUX_SERVER_PORT}' --exclude "node_modules" /tmp/` + SYSTEM.NAME + '/build/ ' + SYSTEM.LINUX_SERVER_USERNAME + '@' + SYSTEM.LINUX_SERVER_HOST + ':' + sourcePath + '/build \n'
    bashContent += 'curl -X GET "' + SYSTEM.SCHEME + '://' + SYSTEM.DOMAIN + '/app/upload?platform=' + type + '&extended=x86-64&token=' + getHashToken() + '&startDate=' + startDate + '&logPath=' + logPath + '" -H "cache-control: no-cache"\n'
    writerStream.write('Run command: \n')
    writerStream.write(bashContent)
    const conn = new Client()
    conn.on('ready', function () {
      const writerStream = fs.createWriteStream(logPath, {flags: 'a'})
      writerStream.write('Client :: ready\n')
      conn.shell(function (err, stream) {
        if (err) throw err
        const writerStream = fs.createWriteStream(logPath, {flags: 'a'})
        stream.pipe(writerStream)
        stream.on('close', function () {
          const writerStream = fs.createWriteStream(logPath, {flags: 'a'})
          writerStream.write('\nStream :: close\n')
          conn.end()
        })
        // Feed the script to the remote shell, then close stdin so it runs to completion.
        stream.end(bashContent)
      })
    }).connect({
      host: SYSTEM.MAC_SERVER_HOST,
      port: Number(SYSTEM.MAC_SERVER_PORT),
      username: SYSTEM.MAC_SERVER_USERNAME,
      // assumes this key is authorized on the Mac build host — TODO confirm
      privateKey: require('fs').readFileSync('/root/.ssh/id_rsa')
    })
  })
}
// Thin per-platform wrappers around startDockerToBuild, produced by a factory
// so both share one definition. Each resolves once its container has started.
const platformBuild = (platform) => async (name, version, docker) => {
  await startDockerToBuild(name, version, platform, docker)
}
const linuxBuild = platformBuild('linux')
const winBuild = platformBuild('win')
// Resolve after the given number of milliseconds.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
// POST /build/webhooks — GitHub push webhook. Verifies the HMAC signature,
// refreshes the working copies, then kicks off a build per configured platform.
export default [
  {
    method: 'POST',
    path: `/build/webhooks`,
    config: {
      auth: false,
      tags: ['api', 'build'],
      description: 'Github webhook',
      validate: {
        headers: {
          'x-hub-signature': Joi.string().required().description('Github Secret.')
        },
        options: {
          // Accept GitHub's other headers without validating them.
          allowUnknown: true
        }
      }
    },
    async handler (request) {
      try {
        // GitHub signs the payload with the shared secret as 'sha1=<hex hmac>'.
        // NOTE(review): this HMACs the re-serialized payload, not the raw request
        // body — works only while serialization matches GitHub's; confirm.
        const shaObj = new JsSHA('SHA-1', 'TEXT')
        shaObj.setHMACKey(SYSTEM.TOKEN, 'TEXT')
        shaObj.update(JSON.stringify(request.payload))
        const hash = shaObj.getHMAC('HEX')
        // NOTE(review): '===' is not a constant-time comparison; consider
        // crypto.timingSafeEqual for signature checks.
        if (request.headers && request.headers['x-hub-signature'] === 'sha1=' + hash) {
          const updateCodeRes = await gitCodeUpdate(SYSTEM.BUILD_TYPE)
          if (updateCodeRes) {
            const packageJson = JSON.parse(fs.readFileSync(path.join(sourcePath, 'package.json'), 'utf-8'))
            if (packageJson && packageJson.name && packageJson.version) {
              const name = packageJson.name
              const version = packageJson.version
              // Builds run fire-and-forget; the short sleeps stagger their start.
              if (SYSTEM.BUILD_TYPE.includes('mac')) {
                macBuild(name, version) // async
                await sleep(500)
              }
              if (SYSTEM.BUILD_TYPE.includes('linux')) {
                linuxBuild(name, version, this.docker) // async
                await sleep(500)
              }
              if (SYSTEM.BUILD_TYPE.includes('win')) {
                winBuild(name, version, this.docker) // async
              }
            } else {
              return this.fail(null, 10010, 'package read failed.')
            }
          } else {
            return this.fail(null, 10009, 'code update failed.')
          }
          return this.success('ok')
        } else {
          // Signature mismatch: generic failure, no detail leaked.
          return this.fail()
        }
      } catch (err) {
        return this.fail(null, 10001, err.toString())
      }
    }
  }
]

5
src/tool/mac.js Normal file
View File

@ -0,0 +1,5 @@
// import { exec } from 'child_process'
// const out = spawn(this.getJavaPath() + '\\java.exe', ['-jar', execPath], {
// cwd: path.join(__dirname.replace('app.asar', 'app.asar.unpacked'))
// })

23
src/util.js Normal file
View File

@ -0,0 +1,23 @@
import Chance from 'chance'
const chance = new Chance()
// Left-pad a non-negative integer with zeros to `size` digits.
// Numbers that already have more than `size` digits are returned unchanged.
// Replaces the Array-join/slice construction with String.prototype.padStart.
function preZeroFill (num, size) {
  const str = num.toString()
  // Already `size`+ digits: return as-is (original behavior).
  return num >= Math.pow(10, size) ? str : str.padStart(size, '0')
}
/**
 * Get a zero-padded random integer string of the given number of digits.
 * e.g. size=3 yields '000'..'999'.
 * @param {number} size digit count
 * @return {string} zero-padded random integer string
 */
export const getIntRandom = (size) => preZeroFill(chance.integer({
  min: 0,
  // Largest `size`-digit number. The original passed the string '99…9'
  // (Array join); pass a real number instead.
  max: Math.pow(10, size) - 1
}), size)

168
src/websocket/container.js Normal file
View File

@ -0,0 +1,168 @@
import { Transform } from 'stream'
import chalk from 'chalk'
const WIN_IMAGE_NAME = 'electronuserland/builder:wine-mono'
const LINUX_IMAGE_NAME = 'electronuserland/builder:10'
// Docker websocket handlers for an authenticated client:
//   'exec' - interactive /bin/bash inside container `id`, bridged over the
//            socket ('show' = output, 'cmd' = keystrokes, 'end' = container exit)
//   'logs' - follow a container's stdout/stderr log stream
//   'pull' - pull the builder image for `type`, emitting readable progress lines
export default (io, socket, docker) => {
  socket.on('exec', function (id, w, h) {
    var container = docker.getContainer(id)
    var cmd = {
      'AttachStdout': true,
      'AttachStderr': true,
      'AttachStdin': true,
      'Tty': true,
      Cmd: ['/bin/bash']
    }
    container.exec(cmd, (err, exec) => {
      var options = {
        'Tty': true,
        stream: true,
        stdin: true,
        stdout: true,
        stderr: true,
        // fix vim
        hijack: true
      }
      // Tell the client when the container itself stops.
      container.wait((err, data) => {
        console.log(err)
        socket.emit('end', 'ended')
      })
      if (err) {
        return
      }
      exec.start(options, (err, stream) => {
        console.log(err)
        var dimensions = {
          h,
          w
        }
        // Resize the TTY to the client's terminal, when dimensions were sent.
        if (dimensions.h !== 0 && dimensions.w !== 0) {
          exec.resize(dimensions, () => {})
        }
        stream.on('data', (chunk) => {
          socket.emit('show', chunk.toString())
        })
        socket.on('cmd', (data) => {
          stream.write(data)
        })
      })
    })
  })
  socket.on('logs', function (id) {
    const container = docker.getContainer(id)
    const logsOpts = {
      follow: true,
      stdout: true,
      stderr: true,
      timestamps: false
    }
    container.logs(logsOpts, (err, stream) => {
      if (err) {
        console.log(err)
        socket.emit('err', chalk.red('Error:\n') + err + '.\n')
        return
      }
      stream.on('data', (data) => { socket.emit('show', data.toString('utf-8')) })
      stream.on('end', function () {
        socket.emit('show', '\n===Stream finished===\n')
        stream.destroy()
      })
    })
  })
  socket.on('pull', function (type) {
    let imageName = null
    switch (type) {
      case 'win':
        imageName = WIN_IMAGE_NAME
        break
      case 'linux':
        imageName = LINUX_IMAGE_NAME
        break
    }
    docker.pull(imageName, function (err, stream) {
      if (err) {
        console.log(err)
        socket.emit('err', chalk.red('Error:\n') + err + '.\n')
        return
      }
      // Human-readable size (decimal units).
      const bytesToSize = (bytes) => {
        if (bytes === 0) return '0 B'
        const k = 1000 // or 1024
        const sizes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
        const i = Math.floor(Math.log(bytes) / Math.log(k))
        return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i]
      }
      // Per-layer progress lines, keyed by docker layer id.
      let downTmp = []
      let downTmpId = []
      const commaSplitter = new Transform({
        readableObjectMode: true,
        transform (chunk, encoding, callback) {
          let tmp = ''
          try {
            // Each chunk may carry a JSON progress object; grab the first {...} span.
            var result = chunk.toString().match(/{(.*)}/)
            result = result ? result[0] : null
            // BUG FIX: the original fell through here without returning, so
            // JSON.parse(null) yielded null, `tmp.id` threw, and callback()
            // at the bottom fired a second time ("callback called multiple
            // times" stream error).
            if (!result) return callback()
            tmp = JSON.parse(result)
            if (tmp.id) {
              if (downTmpId.includes(tmp.id)) {
                // Known layer: update its progress line in place.
                for (const n in downTmp) {
                  if (downTmp[n].id === tmp.id) {
                    if (tmp.progressDetail && tmp.progressDetail.current && tmp.progressDetail.total) {
                      const percentage = Math.floor(100 * tmp.progressDetail.current / tmp.progressDetail.total)
                      downTmp[n].val = ': [' + percentage + '%] Total ' + bytesToSize(tmp.progressDetail.total)
                    } else if (tmp.status) {
                      downTmp[n].val = ': ' + tmp.status
                    }
                  }
                }
              } else {
                // New layer: start tracking it.
                downTmpId.push(tmp.id)
                const temp = {}
                temp.id = tmp.id
                if (tmp.progressDetail && tmp.progressDetail.current && tmp.progressDetail.total) {
                  const percentage = Math.floor(100 * tmp.progressDetail.current / tmp.progressDetail.total)
                  temp.val = ': [' + percentage + '%] Total ' + bytesToSize(tmp.progressDetail.total)
                } else if (tmp.status) {
                  temp.val = ': ' + tmp.status
                }
                downTmp.push(temp)
              }
              // Re-emit the whole progress board on every update.
              let str = ''
              for (const n in downTmp) {
                str += downTmp[n].id + downTmp[n].val + '\n'
              }
              socket.emit('progress', str)
            }
          } catch (err) {
            // Partial/malformed JSON chunks are expected mid-stream; ignore them.
            // console.log(err)
          }
          callback()
        }
      })
      stream.pipe(commaSplitter)
      stream.once('end', () => {
        socket.emit('progress', chalk.green('All: [100%] Finish。\n'))
        // socket.emit('end', imageName + ' install ' + chalk.green('success') + '.\n')
        downTmp = []
        downTmpId = []
      })
    })
  })
}

93
src/websocket/git.js Normal file
View File

@ -0,0 +1,93 @@
import git from 'simple-git'
import chalk from 'chalk'
import fs from 'fs'
import path from 'path'
import { SYSTEM } from '../config'
const GIT_SSH_COMMAND = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
// Create `dirname` and any missing parent directories (mkdir -p semantics).
// Returns true once the directory exists. Uses Node's native recursive mkdir
// (Node >= 10.12) instead of the hand-rolled parent-first recursion, which
// also fixes the undefined return on some recursion paths.
const mkdirsSync = (dirname) => {
  if (!fs.existsSync(dirname)) {
    fs.mkdirSync(dirname, { recursive: true })
  }
  return true
}
// Websocket handler: on 'gitPull', refresh all three working copies
// (source/linux/win), cloning fresh when a directory is missing or is not a
// git checkout, pulling otherwise. Progress is reported over the socket.
export default (io, socket, docker) => {
  socket.on('gitPull', (data) => {
    const repoPath = SYSTEM.REPOPATH
    const workPath = SYSTEM.WORKPATH
    const sourcePath = path.join(workPath, '/source')
    const linuxPath = path.join(workPath, '/linux')
    const winPath = path.join(workPath, '/win')
    // Fresh clone of the repo into targetDir.
    const cloneRepo = (targetDir, label) => {
      git().env({
        ...process.env,
        GIT_SSH_COMMAND
      })
        .clone(repoPath, targetDir, (err) => {
          if (err) {
            socket.emit('err', chalk.red(label + ' clone error:\n') + err + '\n')
            return
          }
          socket.emit('show', chalk.green(label + ' clone is finished!\n'))
        })
    }
    // Pull the existing checkout in targetDir.
    const pullRepo = (targetDir, label) => {
      git(targetDir).env({
        ...process.env,
        GIT_SSH_COMMAND
      })
        .pull((err, update) => {
          if (err) {
            socket.emit('err', chalk.red(label + ' pull error:\n') + err + '\n')
            return
          }
          if (update && update.summary.changes) {
            socket.emit('show', chalk.green(label + ' update success.\n'))
          } else {
            socket.emit('show', chalk.green(label + ' update success, no change.\n'))
          }
        })
    }
    // Same clone-or-pull decision for every target, in the original order.
    const targets = [
      [sourcePath, 'Source'],
      [linuxPath, 'Linux'],
      [winPath, 'Win']
    ]
    for (const [dir, label] of targets) {
      if (!fs.existsSync(dir)) {
        mkdirsSync(dir)
        cloneRepo(dir, label)
      } else if (fs.readdirSync(dir).includes('.git')) {
        pullRepo(dir, label)
      } else {
        cloneRepo(dir, label)
      }
    }
  })
}

30
src/websocket/index.js Normal file
View File

@ -0,0 +1,30 @@
import container from './container'
import git from './git'
import log from './log'
import JsSHA from 'jssha'
import {
SYSTEM
} from '../config'
// Websocket entry point: every connection must authenticate with the SHA-512
// hash of the shared token before the container/git/log handlers are attached.
export default (io, docker) => {
  console.log('Websocket Runing...')
  io.on('connection', (socket) => {
    console.log('One user connected - ' + socket.id)
    socket.emit('requireAuth', 'distribution')
    socket.emit('opend', new Date())
    socket.on('auth', (token) => {
      const digester = new JsSHA('SHA-512', 'TEXT')
      digester.update(SYSTEM.TOKEN)
      const expected = digester.getHash('HEX')
      if (token !== expected) {
        socket.emit('auth', 'fail')
        return
      }
      // Authenticated: wire up the feature handlers for this socket.
      container(io, socket, docker)
      git(io, socket, docker)
      log(io, socket, docker)
      socket.emit('auth', 'success')
    })
  })
}

25
src/websocket/log.js Normal file
View File

@ -0,0 +1,25 @@
import fs from 'fs'
import db from '../lib/db'
// Websocket handler: on 'log', send back the contents of the most recent
// build log whose recorded path matches the (URI-encoded) requested path.
export default (io, socket, docker) => {
  socket.on('log', function (requested) {
    const decoded = decodeURI(requested)
    // Newest matching buildLog entry (largest startDate first).
    const logLast = db.get('buildLog')
      .filter({logPath: decoded})
      .sortBy((item) => -item.startDate)
      .take()
      .first()
      .value()
    if (!(logLast && logLast.logPath)) {
      socket.emit('show', 'No file.')
      return
    }
    try {
      socket.emit('show', fs.readFileSync(logLast.logPath) + '\n')
    } catch (err) {
      socket.emit('show', err.toString() + '\n')
    }
  })
}