Skip to content

Commit

Permalink
Merge pull request #1008 from brave/verified_contents
Browse files Browse the repository at this point in the history
Added verified_contents.json generator.
  • Loading branch information
linhkikuchi authored Nov 29, 2024
2 parents cce6807 + 76c1285 commit 8a8c081
Show file tree
Hide file tree
Showing 3 changed files with 247 additions and 7 deletions.
190 changes: 190 additions & 0 deletions lib/contentSign.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,190 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */

import fs from 'fs-extra'
import path from 'path'
import crypto from 'crypto'
import glob from 'glob'

/* Collects the unique, sorted list of regular files under `dir` matching any
 * of the given glob `patterns`. Directories are excluded from the result. */
const getComponentFiles = (dir, patterns) => {
  const globOptions = {
    cwd: dir,
    posix: true
  }

  // Gather the matches for every pattern in order, then resolve each match
  // against the component directory.
  const matches = patterns.flatMap((pattern) =>
    glob.sync('./' + pattern, globOptions)
  )
  const resolved = matches.map((match) => path.join(dir, match))

  // Dedupe overlapping pattern matches and drop anything that is not a file.
  const uniqueFiles = [...new Set(resolved)].filter(
    (candidate) => !fs.statSync(candidate).isDirectory()
  )

  return uniqueFiles.sort()
}

/**
 * Splits a file into 4096-byte blocks and returns the SHA-256 digest of each
 * block, in file order. An empty file yields a single digest (the hash of the
 * empty string) so every file produces at least one leaf hash.
 *
 * @param filePath path of the file to hash.
 * @returns {Buffer[]} one SHA-256 digest per 4096-byte block.
 */
const computeBlockHashes = (filePath) => {
  const buffer = Buffer.alloc(4096)
  const hashes = []

  const file = fs.openSync(filePath, 'r')
  try {
    while (true) {
      const bytesRead = fs.readSync(file, buffer, 0, buffer.length, null)
      if (bytesRead <= 0) {
        break
      }
      const hash = crypto.createHash('sha256')
      hash.update(buffer.subarray(0, bytesRead))
      hashes.push(hash.digest())
    }
  } finally {
    // Bug fix: the descriptor was never released, leaking one fd per file.
    fs.closeSync(file)
  }

  // Bug fix: the original checked `!hashes`, which is always false because an
  // array is truthy even when empty — empty files produced no digest at all.
  if (hashes.length === 0) {
    const hash = crypto.createHash('sha256')
    hash.update('')
    hashes.push(hash.digest())
  }

  return hashes
}

/**
 * Computes the Merkle ("treehash") root over a file's 4096-byte block hashes.
 * Up to 128 child digests (4096 / 32 bytes) are folded into each parent node,
 * level by level, until a single root remains.
 *
 * @param file path of the file to hash.
 * @returns {Buffer|string} the 32-byte root digest, or '' when there are no
 *   block hashes at all.
 */
const computeRootHash = (file) => {
  let blockHashes = computeBlockHashes(file)
  // Bug fix: the original checked `!blockHashes`, which is always false for
  // an array; an empty hash list fell through and returned `undefined`.
  if (blockHashes.length === 0) {
    return ''
  }

  // Each interior node hashes up to 4096 / 32 = 128 SHA-256 children.
  const branchFactor = 4096 / 32

  while (blockHashes.length > 1) {
    let i = 0
    const parentNodes = []
    while (i !== blockHashes.length) {
      const hash = crypto.createHash('sha256')
      for (let j = 0; j < branchFactor && i !== blockHashes.length; j++, i++) {
        hash.update(blockHashes[i])
      }
      parentNodes.push(hash.digest())
    }
    blockHashes = parentNodes
  }
  return blockHashes[0]
}

/**
 * Builds the unsigned verified_contents payload for a component.
 *
 * @param component object with `dir` (root directory, trailing slash),
 *   `id` (extension id) and `version`.
 * @param files absolute paths of the files to include in the payload.
 * @returns the payload object with one treehash entry per file.
 */
const createPayload = (component, files) => {
  const payload = {
    content_hashes: [
      {
        block_size: 4096,
        digest: 'sha256',
        files: [],
        format: 'treehash',
        hash_block_size: 4096
      }
    ],
    item_id: component.id,
    item_version: component.version,
    protocol_version: 1
  }

  for (const file of files) {
    const rootHash = computeRootHash(file)
    payload.content_hashes[0].files.push({
      // Fix: `replace(component.dir, '')` removed the first occurrence of the
      // directory string anywhere in the path; strip only the leading prefix.
      path: file.startsWith(component.dir)
        ? file.slice(component.dir.length)
        : file,
      // Fix: use the canonical lowercase 'base64url' encoding name (Node
      // matches encodings case-insensitively, but the rest of this file uses
      // the documented spelling).
      root_hash: Buffer.from(rootHash).toString('base64url')
    })
  }

  return payload
}

/**
 * Signs the JWS signing input `protectedBy + '.' + payload` with RSA-SHA256.
 *
 * @param protectedBy base64url-encoded protected header.
 * @param payload base64url-encoded payload.
 * @param privateKey the PEM private key used for signing.
 * @returns {string} the base64url-encoded signature.
 */
const signPayload = (protectedBy, payload, privateKey) => {
  const signingInput = `${protectedBy}.${payload}`
  return crypto
    .createSign('RSA-SHA256')
    .update(signingInput)
    .sign(privateKey, 'base64url')
}

// Returns `path` guaranteed to end with '/', appending one only when missing.
const ensureTrailingSlash = (path) => (path.endsWith('/') ? path : path + '/')

/**
 * Creates the signed verified_contents structure for a component directory:
 * computes per-file treehash root hashes for every file matching `patterns`,
 * wraps them in a JWS-style signed payload, and returns the result in the
 * array form Chromium expects in verified_contents.json.
 *
 * @param inputDir the component's root directory.
 * @param patterns glob patterns selecting the files to sign.
 * @param id the component/extension id.
 * @param version the component version string.
 * @param privateKey path of the RS256 PEM private key file.
 * @returns an array containing the single signed-content descriptor.
 * @throws when the private key is missing/unreadable or no files match.
 */
const createVerifiedContents = (
  inputDir,
  patterns,
  id,
  version,
  privateKey
) => {
  if (!privateKey) {
    throw new Error('Missing private key')
  }

  privateKey = fs.readFileSync(privateKey, 'utf-8')
  if (!privateKey) {
    throw new Error('Cannot read private key')
  }

  inputDir = ensureTrailingSlash(inputDir)

  const componentFiles = getComponentFiles(inputDir, patterns)
  // Bug fix: `!componentFiles` was always false — getComponentFiles returns
  // an array, and arrays are truthy even when empty. Check the length.
  if (componentFiles.length === 0) {
    throw new Error(
      'No files matching the specified patterns were found in the component directory.'
    )
  }

  const component = {
    dir: inputDir,
    id,
    version
  }

  const payload = createPayload(component, componentFiles)

  // JWS protected header: the payload is signed with RS256.
  const protection = {
    alg: 'RS256'
  }

  const encodedPayload = Buffer.from(JSON.stringify(payload)).toString(
    'base64url'
  )
  const encodedProtection = Buffer.from(JSON.stringify(protection)).toString(
    'base64url'
  )

  const result = {
    description: 'treehash per file',
    signed_content: {
      payload: encodedPayload,
      signatures: []
    }
  }

  // Sign `protected.payload` as in JWS; 'webstore' is the key id Chromium
  // looks up when validating verified_contents.json.
  const signature = signPayload(encodedProtection, encodedPayload, privateKey)
  result.signed_content.signatures.push({
    protected: encodedProtection,
    header: {
      kid: 'webstore'
    },
    signature
  })
  return [result]
}
export default {
  createVerifiedContents
}
34 changes: 33 additions & 1 deletion lib/util.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import { S3Client, GetObjectCommand, HeadObjectCommand, PutObjectCommand, PutObj
import replace from 'replace-in-file'
import { pipeline } from 'stream/promises'
import { tmpdir } from 'os'
import contentSign from './contentSign.js'

const DynamoDBTableName = 'Extensions'
const FirstVersion = '1.0.0'
Expand Down Expand Up @@ -63,6 +64,34 @@ const fetchTextFromURL = (listURL) => {
return p
}

/**
 * Writes `brave_metadata/verified_contents.json`. All files in the
 * component's dir that match the specified patterns will be signed.
 *
 * @param inputDir the component's root directory.
 * @param filePatterns the list of the glob file patterns.
 * @param privateKey the RS-256 signing key.
 */
const generateVerifiedContents = (inputDir, filePatterns, privateKey) => {
  // The component id and version come from the staged manifest.
  const manifest = parseManifest(path.join(inputDir, 'manifest.json'))
  const componentId = getIDFromBase64PublicKey(manifest.key)
  const version = manifest.version

  const verifiedContents = contentSign.createVerifiedContents(
    inputDir,
    filePatterns,
    componentId,
    version,
    privateKey
  )

  const metadataDir = path.join(inputDir, 'brave_metadata')
  // Bug fix: a plain mkdirSync throws EEXIST when brave_metadata already
  // exists (e.g. repackaging the same staging dir); recursive is idempotent.
  fs.mkdirSync(metadataDir, { recursive: true })
  fs.writeFileSync(
    path.join(metadataDir, 'verified_contents.json'),
    JSON.stringify(verifiedContents)
  )
}

const generateCRXFile = (binary, crxFile, privateKeyFile, publisherProofKey,
publisherProofKeyAlt, inputDir) => {
if (!binary) {
Expand Down Expand Up @@ -571,6 +600,8 @@ const addCommonScriptOptions = (command) => {
'Private key for generating publisher proof')
.option('-a, --publisher-proof-key-alt <file>',
'Second private key for generating publisher proof. Useful when rotating keys.')
.option('-vc, --verified-contents-key <file>',
'Private key for generating verified_contents.json.')

// If setup locally, use --endpoint http://localhost:8000
.option('-e, --endpoint <endpoint>', 'DynamoDB endpoint to connect to', '')
Expand Down Expand Up @@ -640,5 +671,6 @@ export default {
escapeStringForJSON,
copyManifestWithVersion,
stageDir,
stageFiles
stageFiles,
generateVerifiedContents
}
30 changes: 24 additions & 6 deletions scripts/packageAdBlock.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,16 @@ async function stageFiles (version, outputDir) {
util.copyManifestWithVersion(originalManifest, outputDir, version)
}

// Signs the ad-block component's data files into verified_contents.json
// via the shared util helper.
const generateVerifiedContents = (stagingDir, signingKey) => {
  const signedFilePatterns = ['resources.json', 'list.txt', 'list_catalog.json']
  util.generateVerifiedContents(stagingDir, signedFilePatterns, signingKey)
}

const postNextVersionWork = (componentSubdir, key, publisherProofKey,
publisherProofKeyAlt, binary, localRun, version, contentHash) => {
publisherProofKeyAlt, binary, localRun, version, contentHash, verifiedContentsKey) => {
const stagingDir = path.join('build', 'ad-block-updater', componentSubdir)
const crxOutputDir = path.join('build', 'ad-block-updater')
const crxFile = path.join(crxOutputDir, `ad-block-updater-${componentSubdir}.crx`)
Expand All @@ -32,6 +40,7 @@ const postNextVersionWork = (componentSubdir, key, publisherProofKey,
fs.unlinkSync(contentHashFile)
}
if (!localRun) {
generateVerifiedContents(stagingDir, verifiedContentsKey)
const privateKeyFile = path.join(key, `ad-block-updater-${componentSubdir}.pem`)
util.generateCRXFile(binary, crxFile, privateKeyFile, publisherProofKey,
publisherProofKeyAlt, stagingDir)
Expand All @@ -48,8 +57,16 @@ const getOriginalManifest = (componentSubdir) => {
return path.join(manifestsDir, componentSubdir, 'manifest.json')
}

const processComponent = (binary, endpoint, region, keyDir,
publisherProofKey, publisherProofKeyAlt, localRun, componentSubdir) => {
const processComponent = (
binary,
endpoint,
region,
keyDir,
publisherProofKey,
publisherProofKeyAlt,
localRun,
verifiedContentsKey,
componentSubdir) => {
const originalManifest = getOriginalManifest(componentSubdir)

// TODO - previous download failures should prevent the attempt to package the component.
Expand Down Expand Up @@ -80,14 +97,14 @@ const processComponent = (binary, endpoint, region, keyDir,
util.getNextVersion(endpoint, region, id, contentHash).then((version) => {
if (version !== undefined) {
postNextVersionWork(componentSubdir, keyDir, publisherProofKey,
publisherProofKeyAlt, binary, localRun, version, contentHash)
publisherProofKeyAlt, binary, localRun, version, contentHash, verifiedContentsKey)
} else {
console.log('content for ' + id + ' was not updated, skipping!')
}
})
} else {
postNextVersionWork(componentSubdir, undefined, publisherProofKey,
publisherProofKeyAlt, binary, localRun, '1.0.0', contentHash)
publisherProofKeyAlt, binary, localRun, '1.0.0', contentHash, verifiedContentsKey)
}
}

Expand All @@ -109,7 +126,8 @@ const processJob = async (commander, keyDir) => {
commander.region, keyDir,
commander.publisherProofKey,
commander.publisherProofKeyAlt,
commander.localRun))
commander.localRun,
commander.verifiedContentsKey))
}

util.installErrorHandlers()
Expand Down

0 comments on commit 8a8c081

Please sign in to comment.