/**
 * Writes bytes from {@code src} to the wrapped channel and updates the local
 * position/size bookkeeping by the number of bytes actually written.
 *
 * <p>Synchronized on this instance so the counters stay consistent with the
 * underlying write.
 *
 * @throws ClosedChannelException if the channel has been closed
 */
@Override
public int write(ByteBuffer src) throws IOException {
  synchronized (this) {
    checkOpen();
    final int written = channel.write(src);
    if (written > 0) {
      // Each successful write advances the position and grows the tracked
      // size by the same amount.
      position += written;
      size += written;
    }
    return written;
  }
}
// Verifies that isOpen() delegates to the wrapped GCS channel (open first,
// closed after close()) and that close() is forwarded exactly once, with no
// other interactions on the mock.
@Test public void testIsOpen() throws IOException { when(gcsChannel.isOpen()).thenReturn(true).thenReturn(false); assertThat(chan.isOpen()).isTrue(); chan.close(); assertThat(chan.isOpen()).isFalse(); verify(gcsChannel, times(2)).isOpen(); verify(gcsChannel).close(); verifyNoMoreInteractions(gcsChannel); }
// Round-trips a blob through capture()/restore() on both the write and read
// channels: write half the content, snapshot the writer, finish with the
// restored writer; then read half, snapshot the reader, finish with the
// restored reader, and verify both halves.
@Test
public void testReadAndWriteCaptureChannels() throws IOException {
  String blobName = "test-read-and-write-capture-channels-blob";
  BlobInfo blob = BlobInfo.newBuilder(BUCKET, blobName).build();
  // Stage 1: write the byte content, then resume via a restored writer.
  WriteChannel writer = storage.writer(blob);
  byte[] stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
  writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
  WriteChannel restoredWriter = writer.capture().restore();
  restoredWriter.write(ByteBuffer.wrap(stringBytes));
  restoredWriter.close();
  // Stage 2: read the first chunk, then resume via a restored reader.
  ReadChannel reader = storage.reader(blob.getBlobId());
  reader.setChunkSize(BLOB_BYTE_CONTENT.length);
  ByteBuffer firstChunk = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length);
  reader.read(firstChunk);
  ReadChannel restoredReader = reader.capture().restore();
  ByteBuffer secondChunk = ByteBuffer.allocate(stringBytes.length);
  restoredReader.read(secondChunk);
  reader.close();
  restoredReader.close();
  assertArrayEquals(BLOB_BYTE_CONTENT, firstChunk.array());
  assertEquals(BLOB_STRING_CONTENT, new String(secondChunk.array(), UTF_8));
  assertTrue(storage.delete(BUCKET, blobName));
}
// A 1-byte write must advance both position and size from 0 to 1 and
// delegate exactly one write() call to the wrapped channel.
@Test
public void testWrite() throws IOException {
  ByteBuffer buf = ByteBuffer.allocate(1);
  buf.put((byte) 'B');
  // Channel starts empty.
  assertThat(chan.position()).isEqualTo(0L);
  assertThat(chan.size()).isEqualTo(0L);
  when(gcsChannel.write(eq(buf))).thenReturn(1);
  assertThat(chan.write(buf)).isEqualTo(1);
  assertThat(chan.position()).isEqualTo(1L);
  assertThat(chan.size()).isEqualTo(1L);
  verify(gcsChannel).write(any(ByteBuffer.class));
  verify(gcsChannel, times(5)).isOpen();
  verifyNoMoreInteractions(gcsChannel);
}
/**
 * Reports whether the wrapped channel is still open.
 *
 * <p>Declared {@code synchronized} (the instance monitor — the same lock the
 * other channel operations take) so the answer is consistent with concurrent
 * close() calls.
 */
@Override
public synchronized boolean isOpen() {
  return channel.isOpen();
}
/**
 * Closes the wrapped channel. Declared {@code synchronized} (the instance
 * monitor), matching the locking of the other channel operations.
 */
@Override
public synchronized void close() throws IOException {
  channel.close();
}
@Test public void testStateEquals() { expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID).times(2); replay(storageRpcMock); writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); // avoid closing when you don't want partial writes to GCS upon failure @SuppressWarnings("resource") WriteChannel writer2 = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); RestorableState<WriteChannel> state = writer.capture(); RestorableState<WriteChannel> state2 = writer2.capture(); assertEquals(state, state2); assertEquals(state.hashCode(), state2.hashCode()); assertEquals(state.toString(), state2.toString()); }
/**
 * Ensures the wrapped channel is open before an operation proceeds.
 *
 * @throws ClosedChannelException if the underlying channel has been closed
 */
private void checkOpen() throws ClosedChannelException {
  if (channel.isOpen()) {
    return;
  }
  throw new ClosedChannelException();
}
}
/**
 * Streams the local file at {@code configuration.y()} into a BigQuery write
 * channel configured by {@code configuration.x()}, in CHUNK_SIZE transfers.
 */
@Override
void run(BigQuery bigquery, Tuple<WriteChannelConfiguration, String> configuration)
    throws Exception {
  System.out.println("Running insert");
  try (FileChannel fileChannel = FileChannel.open(Paths.get(configuration.y()))) {
    WriteChannel writeChannel = bigquery.writer(configuration.x());
    // NOTE(review): writeChannel is closed only on the success path, not via
    // try-with-resources — closing this channel appears to finalize the
    // upload, so a close-on-failure would commit a partial load. Confirm
    // against the WriteChannel contract before changing.
    long offset = 0;
    long copied;
    while ((copied = fileChannel.transferTo(offset, CHUNK_SIZE, writeChannel)) > 0) {
      offset += copied;
    }
    writeChannel.close();
  }
}
@Test public void testStateEquals() { expect( bigqueryRpcMock.open( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .andReturn(UPLOAD_ID) .times(2); replay(bigqueryRpcMock); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); // avoid closing when you don't want partial writes upon failure @SuppressWarnings("resource") WriteChannel writer2 = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); RestorableState<WriteChannel> state = writer.capture(); RestorableState<WriteChannel> state2 = writer2.capture(); assertEquals(state, state2); assertEquals(state.hashCode(), state2.hashCode()); assertEquals(state.toString(), state2.toString()); }
/** Example of writing the blob's content through a writer. */ // [TARGET writer(BlobWriteOption...)] public void writer() throws IOException { // [START writer] byte[] content = "Hello, World!".getBytes(UTF_8); try (WriteChannel writer = blob.writer()) { try { writer.write(ByteBuffer.wrap(content, 0, content.length)); } catch (Exception ex) { // handle exception } } // [END writer] }
// Common fixture: the mocked GCS channel reports itself open by default;
// individual tests override this stub when they need a closed channel.
@Before public void before() { when(gcsChannel.isOpen()).thenReturn(true); }
// Forwards close() to the underlying writer.
@Override public void close() throws IOException { writer.close(); } };
// Capturing an already-closed writer must yield a state that restores to an
// equally-closed writer (no buffer, position 0), and closing must flush an
// empty final write to the RPC layer.
@Test
public void testSaveAndRestoreClosed() throws IOException {
  expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
  Capture<byte[]> writtenBytes = Capture.newInstance();
  storageRpcMock.write(eq(UPLOAD_ID), capture(writtenBytes), eq(0), eq(0L), eq(0), eq(true));
  replay(storageRpcMock);
  writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
  writer.close();
  RestorableState<WriteChannel> savedState = writer.capture();
  RestorableState<WriteChannel> expectedState =
      BlobWriteChannel.StateImpl.builder(options, BLOB_INFO, UPLOAD_ID)
          .setBuffer(null)
          .setChunkSize(DEFAULT_CHUNK_SIZE)
          .setIsOpen(false)
          .setPosition(0)
          .build();
  WriteChannel restored = savedState.restore();
  assertArrayEquals(new byte[0], writtenBytes.getValue());
  assertEquals(expectedState, restored.capture());
}
@Override public void run() { try { if (directUpload) { byte[] content = Files.readAllBytes(localFile.toPath()); Blob result = mClient.create(sourceBlob, content); LOG.debug("Upload file {} to gs://{}/{}", localFile, gsBucket, gsKey); LOG.trace("Upload file {}, Blob: {}", result); } else { long startTime = System.nanoTime(); try (WriteChannel out = mClient.writer(sourceBlob); FileChannel in = new FileInputStream(localFile).getChannel(); ) { ByteBuffer buffer = ByteBuffer.allocateDirect(1024 * 1024 * 5); // 5 MiB buffer (remember this is pr. thread) int bytesRead; while ((bytesRead = in.read(buffer)) > 0) { buffer.flip(); out.write(buffer); buffer.clear(); } } long elapsedTime = System.nanoTime() - startTime; LOG.debug("Upload file {} to gs://{}/{} in {} msec", localFile, gsBucket, gsKey, (elapsedTime / 1000000.0)); } } catch (IOException e) { throw new RuntimeException(e); } } });
// When the wrapped channel reports closed, write() must fail with
// ClosedChannelException (via the checkOpen() precondition) rather than
// delegating to the channel.
@Test public void testWrite_whenClosed_throwsCce() throws IOException { when(gcsChannel.isOpen()).thenReturn(false); thrown.expect(ClosedChannelException.class); chan.write(ByteBuffer.allocate(1)); }
/**
 * Closes the wrapped channel. Declared {@code synchronized} (the instance
 * monitor), the same lock the other channel operations take.
 */
@Override
public synchronized void close() throws IOException {
  channel.close();
}
// Capturing an already-closed table-data writer must yield a state that
// restores to an equally-closed writer (no buffer, position 0); closing
// flushes an empty final write and exposes the resulting job.
@Test
public void testSaveAndRestoreClosed() throws IOException {
  expect(
          bigqueryRpcMock.open(
              new com.google.api.services.bigquery.model.Job()
                  .setJobReference(JOB_INFO.getJobId().toPb())
                  .setConfiguration(LOAD_CONFIGURATION.toPb())))
      .andReturn(UPLOAD_ID);
  Capture<byte[]> writtenBytes = Capture.newInstance();
  expect(
          bigqueryRpcMock.write(
              eq(UPLOAD_ID), capture(writtenBytes), eq(0), eq(0L), eq(0), eq(true)))
      .andReturn(job.toPb());
  replay(bigqueryRpcMock);
  writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION);
  writer.close();
  assertEquals(job, writer.getJob());
  RestorableState<WriteChannel> savedState = writer.capture();
  RestorableState<WriteChannel> expectedState =
      TableDataWriteChannel.StateImpl.builder(options, LOAD_CONFIGURATION, UPLOAD_ID, job)
          .setBuffer(null)
          .setChunkSize(DEFAULT_CHUNK_SIZE)
          .setIsOpen(false)
          .setPosition(0)
          .build();
  WriteChannel restored = savedState.restore();
  assertArrayEquals(new byte[0], writtenBytes.getValue());
  assertEquals(expectedState, restored.capture());
}
/** Example of writing a blob's content through a writer. */ // [TARGET writer(BlobInfo, BlobWriteOption...)] // [VARIABLE "my_unique_bucket"] // [VARIABLE "my_blob_name"] public void writer(String bucketName, String blobName) throws IOException { // [START writer] BlobId blobId = BlobId.of(bucketName, blobName); byte[] content = "Hello, World!".getBytes(UTF_8); BlobInfo blobInfo = BlobInfo.newBuilder(blobId).setContentType("text/plain").build(); try (WriteChannel writer = storage.writer(blobInfo)) { try { writer.write(ByteBuffer.wrap(content, 0, content.length)); } catch (Exception ex) { // handle exception } } // [END writer] }