describe('Unit | Utils | mergeChunks', function() {
  it('should exist', function() {
    expect(mergeChunks).to.be.a('function');
  });

  it('should work', function() {
    // Falsy or empty chunk lists collapse to an empty Buffer.
    for (const chunks of [null, []]) {
      const result = mergeChunks(chunks);
      expect(Buffer.isBuffer(result)).to.be.true;
      expect(result.toString()).to.have.lengthOf(0);
    }

    // String chunks are joined into a plain string, not a Buffer.
    const joined = mergeChunks(['T', 'e', 's', 't']);
    expect(Buffer.isBuffer(joined)).to.be.false;
    expect(joined).to.equal('Test');

    // Buffer chunks are concatenated into a single Buffer.
    const merged = mergeChunks(['T', 'e', 's', 't'].map(ch => Buffer.from(ch)));
    expect(Buffer.isBuffer(merged)).to.be.true;
    expect(merged.toString()).to.equal('Test');
  });
});
getChunksFromBody(body, headers, isBinary = false) { if (!body) { return []; } if (Buffer.isBuffer(body)) { return [body]; } // If content-encoding is set in the header then the body/content // is as an array of hex strings if (isContentEncoded(headers)) { const hexChunks = JSON.parse(body); return hexChunks.map(chunk => Buffer.from(chunk, 'hex')); } // The body can be one of two things: // 1. A hex string which then means its binary data. // 2. A utf8 string which means a regular string. return [Buffer.from(body, isBinary ? 'hex' : 'utf8')]; }
constructor(config) { super(); const options = { scopes: ['https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/drive'], projectId: process.env.CUBEJS_DB_BQ_PROJECT_ID, keyFilename: process.env.CUBEJS_DB_BQ_KEY_FILE, credentials: process.env.CUBEJS_DB_BQ_CREDENTIALS ? JSON.parse(Buffer.from(process.env.CUBEJS_DB_BQ_CREDENTIALS, 'base64').toString('utf8')) : undefined, ...config }; this.bigquery = new BigQuery(options); this.mapFieldsRecursive = this.mapFieldsRecursive.bind(this); this.tablesSchema = this.tablesSchema.bind(this); this.parseDataset = this.parseDataset.bind(this); this.parseTableData = this.parseTableData.bind(this); this.flatten = this.flatten.bind(this); this.toObjectFromId = this.toObjectFromId.bind(this); }
/**
 * Serializes a body for storage. Binary payloads (Buffer, chunked arrays
 * containing Buffers, ArrayBuffer, typed-array views) become a hex string;
 * any other body is returned unchanged.
 *
 * @param {*} body - The body to serialize.
 * @returns {*} A hex string for binary input, otherwise the original body.
 */
export function serialize(body) {
  if (!supportsBuffer || !body) {
    return body;
  }

  let buffer;

  if (Buffer.isBuffer(body)) {
    buffer = body;
  } else if (Array.isArray(body) && body.some(chunk => Buffer.isBuffer(chunk))) {
    // Chunked array: normalize every chunk to a Buffer, then concatenate.
    buffer = Buffer.concat(body.map(chunk => Buffer.from(chunk)));
  } else if (`${body}` === '[object ArrayBuffer]') {
    buffer = Buffer.from(body);
  } else if (supportsArrayBuffer && ArrayBuffer.isView(body)) {
    // Respect the view's window into its underlying buffer.
    buffer = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
  }

  return Buffer.isBuffer(buffer) ? buffer.toString('hex') : body;
}
test('will succeed with jwt and buffer as secret ', async () => {
  const pgClient = { query: jest.fn(), release: jest.fn() };
  const pgPool = { connect: jest.fn(() => pgClient) };

  // Sign and verify with the same Buffer-typed secret.
  const bufferSecret = Buffer.from('secret', 'utf8');
  const jwtToken = jwt.sign({ aud: 'postgraphile' }, bufferSecret, {
    noTimestamp: true,
  });

  await withPostGraphileContext(
    { pgPool, jwtToken, jwtSecret: bufferSecret },
    () => {},
  );

  // Claims from the verified token must be set inside the transaction.
  expect(pgClient.query.mock.calls).toEqual([
    ['begin'],
    [
      {
        text: 'select set_config($1, $2, true)',
        values: ['jwt.claims.aud', 'postgraphile'],
      },
    ],
    ['commit'],
  ]);
});
getChunksFromBody(body, headers, isBinary = false) { if (!body) { return []; } if (Buffer.isBuffer(body)) { return [body]; } // If content-encoding is set in the header then the body/content // is as an array of hex strings if (isContentEncoded(headers)) { const hexChunks = JSON.parse(body); return hexChunks.map(chunk => Buffer.from(chunk, 'hex')); } // The body can be one of two things: // 1. A hex string which then means its binary data. // 2. A utf8 string which means a regular string. return [Buffer.from(body, isBinary ? 'hex' : 'utf8')]; }
describe('Unit | Utils | mergeChunks', function() {
  it('should exist', function() {
    expect(mergeChunks).to.be.a('function');
  });

  it('should work', function() {
    // null and [] both produce an empty Buffer.
    [null, []].forEach(function(input) {
      const merged = mergeChunks(input);
      expect(Buffer.isBuffer(merged)).to.be.true;
      expect(merged.toString()).to.have.lengthOf(0);
    });

    // An array of strings merges to a string.
    const asString = mergeChunks(['T', 'e', 's', 't']);
    expect(Buffer.isBuffer(asString)).to.be.false;
    expect(asString).to.equal('Test');

    // An array of Buffers merges to one Buffer with the same contents.
    const bufferChunks = ['T', 'e', 's', 't'].map(function(ch) {
      return Buffer.from(ch);
    });
    const asBuffer = mergeChunks(bufferChunks);
    expect(Buffer.isBuffer(asBuffer)).to.be.true;
    expect(asBuffer.toString()).to.equal('Test');
  });
});
/**
 * Turns a binary body into a hex string for persistence; non-binary bodies
 * pass through untouched.
 *
 * @param {*} body - Buffer, chunked array, ArrayBuffer, typed-array view, or
 *   any other value.
 * @returns {*} Hex string when the body is binary, otherwise the body itself.
 */
export function serialize(body) {
  if (supportsBuffer && body) {
    let asBuffer;

    if (Buffer.isBuffer(body)) {
      asBuffer = body;
    } else if (Array.isArray(body) && body.some(item => Buffer.isBuffer(item))) {
      // Chunked array of (at least some) Buffers — coerce and join.
      asBuffer = Buffer.concat(body.map(item => Buffer.from(item)));
    } else if (`${body}` === '[object ArrayBuffer]') {
      asBuffer = Buffer.from(body);
    } else if (supportsArrayBuffer && ArrayBuffer.isView(body)) {
      // Copy only the view's slice of the underlying ArrayBuffer.
      asBuffer = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
    }

    if (Buffer.isBuffer(asBuffer)) {
      return asBuffer.toString('hex');
    }
  }

  return body;
}