fix: switch to GitHub downloads
Web3Storage no longer offers free usage, so switch to downloading tarballs
from GitHub release pages instead and verifying them against an expected CID.
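
With the new default `distUrl` in `src/download.js`, the `%s` placeholders are filled with the release tag (twice) and the platform string, so for example the darwin build of `v0.6.0` now downloads from:

```
https://github.com/libp2p/go-libp2p-daemon/releases/download/v0.6.0/p2pd-v0.6.0-darwin.tar.gz
```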
achingbrain committed Apr 29, 2024
1 parent 98b1f09 commit 3d6d629
Showing 10 changed files with 170 additions and 153 deletions.
8 changes: 4 additions & 4 deletions Makefile
@@ -1,8 +1,8 @@
-COMMIT?=v0.5.0
+COMMIT?=v0.6.0
 TARGETS=linux darwin win32
 WORKDIR=bin

-all: clean darwin linux win32
+all: clean darwin linux win32 versions

 clean:
 	rm -rf *.tar.gz *.zip bin/p2pd-* bin/go-libp2p-daemon
@@ -44,7 +44,7 @@ win32:
 	zip p2pd-$(COMMIT)-$@-arm64.zip $(WORKDIR)/p2pd-win32-arm64.exe && \
 	zip p2pd-$(COMMIT)-$@-386.zip $(WORKDIR)/p2pd-win32-386.exe

-upload:
-	node ./scripts/upload.js
+versions:
+	node ./scripts/update-versions.js

 .PHONY: clean
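
For reference, the new target runs like so (the second line is just `make` echoing the recipe):

```console
$ make versions
node ./scripts/update-versions.js
```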
5 changes: 1 addition & 4 deletions README.md
@@ -54,10 +54,7 @@ archive the binaries and upload them to web3.storage.
    ```console
    $ make all
    ```
-2. Upload new versions
-   ```console
-   $ make upload
-   ```
+2. Upload new versions to the GitHub release page
 3. Open a PR to this repo with changes made to `src/versions.json`

 If anything goes wrong:
21 changes: 10 additions & 11 deletions package.json
@@ -74,28 +74,27 @@
   },
   "dependencies": {
     "blockstore-core": "^4.2.0",
+    "browser-readablestream-to-it": "^2.0.7",
     "cachedir": "^2.3.0",
     "delay": "^6.0.0",
-    "got": "^12.5.3",
     "gunzip-maybe": "^1.4.2",
     "ipfs-unixfs-importer": "^15.1.5",
-    "it-last": "^3.0.2",
-    "multiformats": "^11.0.2",
-    "p-retry": "^5.1.2",
-    "pkg-conf": "^4.0.0",
-    "tar-fs": "^2.1.0",
-    "uint8arrays": "^4.0.3",
+    "it-to-buffer": "^4.0.7",
+    "multiformats": "^13.1.0",
+    "p-retry": "^6.2.0",
+    "package-config": "^5.0.0",
+    "tar-fs": "^3.0.6",
+    "uint8arrays": "^5.0.3",
     "unzip-stream": "^0.3.0"
   },
   "devDependencies": {
     "@types/got": "^9.6.12",
     "@types/gunzip-maybe": "^1.4.0",
     "@types/tar-fs": "^2.0.1",
     "@types/unzip-stream": "^0.3.1",
-    "aegir": "^39.0.9",
-    "execa": "^7.0.0",
-    "pre-commit": "^1.2.2",
-    "web3.storage": "^4.5.4"
+    "aegir": "^42.2.9",
+    "execa": "^8.0.1",
+    "pre-commit": "^1.2.2"
   },
   "pre-commit": "restore-bin"
 }
36 changes: 36 additions & 0 deletions scripts/update-versions.js
@@ -0,0 +1,36 @@
/* eslint-disable no-console */
import fs from 'node:fs'
import { join, resolve } from 'node:path'
import * as url from 'node:url'
import { hashFile } from '../src/hash-file.js'
import { ARCHITECTURES } from '../src/arches.js'

const __dirname = url.fileURLToPath(new URL('.', import.meta.url))
const versionsPath = join(__dirname, '..', 'src', 'versions.json')

const version = fs.readFileSync(join(__dirname, '..', 'Makefile'), {
  encoding: 'utf8'
})
  .split('\n')
  .map(line => line.trim())
  .filter(line => line.startsWith('COMMIT?='))
  .pop()
  .replace('COMMIT?=', '')

const versions = {}

for (const arch of ARCHITECTURES) {
  const filePath = resolve(join(__dirname, '..', `p2pd-${version}-${arch}.${arch.includes('win32') ? 'zip' : 'tar.gz'}`))
  const cid = await hashFile(filePath)
  versions[arch] = cid.toString()
}

const manifest = JSON.parse(fs.readFileSync(versionsPath, {
  encoding: 'utf8'
}))

manifest.versions[version] = versions

fs.writeFileSync(versionsPath, JSON.stringify(manifest, null, 2), {
  encoding: 'utf8'
})
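
For illustration, after a `v0.6.0` build the script would extend `src/versions.json` with an entry shaped like the sketch below. The CID strings are placeholders, and `latest` is shown only for context; this script updates `versions[version]` and nothing else:

```json
{
  "latest": "v0.5.0",
  "versions": {
    "v0.6.0": {
      "darwin": "bafybeia...",
      "linux-386": "bafybeib...",
      "linux-amd64": "bafybeic...",
      "linux-arm64": "bafybeid...",
      "win32-386": "bafybeie...",
      "win32-amd64": "bafybeif...",
      "win32-arm64": "bafybeig..."
    }
  }
}
```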
59 changes: 0 additions & 59 deletions scripts/upload.js

This file was deleted.

9 changes: 9 additions & 0 deletions src/arches.js
@@ -0,0 +1,9 @@
export const ARCHITECTURES = [
  'darwin',
  'linux-386',
  'linux-amd64',
  'linux-arm64',
  'win32-386',
  'win32-amd64',
  'win32-arm64'
]
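
These strings double as the platform segment of the release asset names. A minimal sketch of the mapping, mirroring the filename logic in `scripts/update-versions.js` (the tag is illustrative):

```js
// 'darwin'      -> p2pd-v0.6.0-darwin.tar.gz
// 'win32-amd64' -> p2pd-v0.6.0-win32-amd64.zip
const assetName = (version, arch) =>
  `p2pd-${version}-${arch}.${arch.includes('win32') ? 'zip' : 'tar.gz'}`
```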
81 changes: 37 additions & 44 deletions src/download.js
Expand Up @@ -12,22 +12,19 @@ import os from 'node:os'
import path from 'node:path'
import * as url from 'node:url'
import util from 'node:util'
import { BlackHoleBlockstore } from 'blockstore-core/black-hole'
import browserReadableStreamToIt from 'browser-readablestream-to-it'
import cachedir from 'cachedir'
import delay from 'delay'
import got from 'got'
import gunzip from 'gunzip-maybe'
import { importer } from 'ipfs-unixfs-importer'
import { fixedSize } from 'ipfs-unixfs-importer/chunker'
import { balanced } from 'ipfs-unixfs-importer/layout'
import last from 'it-last'
import toBuffer from 'it-to-buffer'
import { CID } from 'multiformats/cid'
import retry from 'p-retry'
import { packageConfigSync } from 'pkg-conf'
import { packageConfigSync } from 'package-config'
import tarFS from 'tar-fs'
import { equals as uint8ArrayEquals } from 'uint8arrays/equals'
import unzip from 'unzip-stream'
import * as goenv from './go-platform.js'
import { hashFile } from './hash-file.js'

const __dirname = url.fileURLToPath(new URL('.', import.meta.url))
const isWin = process.platform === 'win32'
@@ -36,6 +36,8 @@ const { latest, versions } = JSON.parse(fs.readFileSync(path.join(__dirname, 've
   encoding: 'utf-8'
 }))

+const DOWNLOAD_TIMEOUT_MS = 60000
+
 /**
  * avoid expensive fetch if file is already in cache
  *
@@ -63,8 +62,34 @@ async function cachingFetchAndVerify (url, cid, options = {}) {
   console.info(`Cached file ${cachedFilePath} not found`)
   console.info(`Downloading ${url} to ${cacheDir}`)

-  const buf = await retry(async (attempt) => {
-    return await got(url).buffer()
+  const buf = await retry(async () => {
+    const signal = AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS)
+
+    try {
+      const res = await fetch(url, {
+        signal
+      })
+
+      console.info(`${url} ${res.status} ${res.statusText}`)
+
+      if (!res.ok) {
+        throw new Error(`${res.status}: ${res.statusText}`)
+      }
+
+      const body = res.body
+
+      if (body == null) {
+        throw new Error('Response had no body')
+      }
+
+      return await toBuffer(browserReadableStreamToIt(body))
+    } catch (err) {
+      if (signal.aborted) {
+        console.error(`Download timed out after ${DOWNLOAD_TIMEOUT_MS}ms`)
+      }
+
+      throw err
+    }
   }, {
     retries,
     onFailedAttempt: async (err) => {
@@ -82,39 +107,7 @@ async function cachingFetchAndVerify (url, cid, options = {})
   }

   console.info(`Verifying ${filename} from ${cachedFilePath}`)
-
-  const blockstore = new BlackHoleBlockstore()
-  const input = fs.createReadStream(cachedFilePath)
-  let result
-
-  if (cid.startsWith('bafy')) {
-    console.info('Recreating new-style CID')
-    // new-style w3storage CID
-    result = await last(importer([{
-      content: input
-    }], blockstore, {
-      cidVersion: 1,
-      rawLeaves: true,
-      chunker: fixedSize({ chunkSize: 1024 * 1024 }),
-      layout: balanced({ maxChildrenPerNode: 1024 })
-    }))
-  } else {
-    // old-style kubo CID
-    result = await last(importer([{
-      content: input
-    }], blockstore, {
-      cidVersion: 0,
-      rawLeaves: false,
-      chunker: fixedSize({ chunkSize: 262144 }),
-      layout: balanced({ maxChildrenPerNode: 174 })
-    }))
-  }
-
-  if (result == null) {
-    throw new Error('Import failed')
-  }
-
-  const receivedCid = result.cid
+  const receivedCid = await hashFile(cachedFilePath)
   const downloadedCid = CID.parse(cid)

   if (!uint8ArrayEquals(downloadedCid.multihash.bytes, receivedCid.multihash.bytes)) {
@@ -168,7 +161,7 @@ function cleanArguments (options = {}) {
     cwd: process.env.INIT_CWD ?? process.cwd(),
     defaults: {
       version: options.version ?? latest,
-      distUrl: 'https://%s.ipfs.w3s.link'
+      distUrl: 'https://github.com/libp2p/go-libp2p-daemon/releases/download/%s/p2pd-%s-%s.tar.gz'
     }
   })

@@ -207,7 +200,7 @@ async function getDownloadURL (version, platform, arch, distUrl) {
   }

   return {
-    url: util.format(distUrl, CID.parse(cid).toV1().toString()),
+    url: util.format(distUrl, version, version, platform),
     cid
   }
 }
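
Net effect on the download path: `got` is gone in favour of the global `fetch` plus `AbortSignal.timeout`, and the response stream is buffered via `it-to-buffer`. A self-contained sketch of the same pattern (URL, timeout and retry count are illustrative):

```js
import browserReadableStreamToIt from 'browser-readablestream-to-it'
import toBuffer from 'it-to-buffer'
import retry from 'p-retry'

const buf = await retry(async () => {
  // abort the request if it takes longer than a minute overall
  const res = await fetch('https://example.com/p2pd.tar.gz', {
    signal: AbortSignal.timeout(60_000)
  })

  if (!res.ok) {
    throw new Error(`${res.status}: ${res.statusText}`)
  }

  if (res.body == null) {
    throw new Error('Response had no body')
  }

  // convert the WHATWG ReadableStream to an async iterable and buffer it
  return toBuffer(browserReadableStreamToIt(res.body))
}, { retries: 5 })
```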
33 changes: 33 additions & 0 deletions src/hash-file.js
@@ -0,0 +1,33 @@
import fs from 'node:fs'
import { BlackHoleBlockstore } from 'blockstore-core/black-hole'
import { importer } from 'ipfs-unixfs-importer'
import { fixedSize } from 'ipfs-unixfs-importer/chunker'
import { balanced } from 'ipfs-unixfs-importer/layout'
import last from 'it-last'

/**
 * @typedef {import('multiformats/cid').CID} CID
 */

/**
 * @param {string} filePath
 * @returns {Promise<CID>}
 */
export async function hashFile (filePath) {
  const blockstore = new BlackHoleBlockstore()
  const input = fs.createReadStream(filePath)
  const result = await last(importer([{
    content: input
  }], blockstore, {
    cidVersion: 1,
    rawLeaves: true,
    chunker: fixedSize({ chunkSize: 1024 * 1024 }),
    layout: balanced({ maxChildrenPerNode: 1024 })
  }))

  if (result == null) {
    throw new Error('Import failed')
  }

  return result.cid
}
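
A quick usage sketch (the path and printed CID are illustrative). Because blocks go to a `BlackHoleBlockstore`, this computes a file's CID without storing anything:

```js
import { hashFile } from './src/hash-file.js'

const cid = await hashFile('./p2pd-v0.6.0-darwin.tar.gz')
console.log(cid.toString()) // CIDv1 with raw leaves, e.g. bafybei...
```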