public void widgetDisposed( DisposeEvent event ) {
  // On disposal of the job-graph widget, free the central log buffer for
  // this job's channel; the 'true' flag also discards child channel lines.
  if ( jobGraph.job != null ) {
    KettleLogStore.discardLines( jobGraph.job.getLogChannelId(), true );
  }
} } );
@VisibleForTesting void discardLogLines( JobExecutorData data ) { // Keep the strain on the logging back-end conservative. // TODO: make this optional/user-defined later if ( data.executorJob != null ) { KettleLogStore.discardLines( data.executorJob.getLogChannelId(), false ); LoggingRegistry.getInstance().removeIncludingChildren( data.executorJob.getLogChannelId() ); } }
/**
 * Removes the job identified by the given Carte object id: discards its
 * buffered log lines (including child channels) and drops it from the job map.
 *
 * @param id the Carte object id of the job to remove
 * @return 200 OK when removed, 404 when the id does not resolve to a job
 */
@GET
@Path( "/remove/{id : .+}" )
public Response removeJob( @PathParam( "id" ) String id ) {
  Job job = CarteResource.getJob( id );
  CarteObjectEntry entry = CarteResource.getCarteObjectEntry( id );
  // Fix: an unknown id previously caused an NPE on job.getLogChannelId();
  // answer 404 instead so clients get a meaningful status.
  if ( job == null || entry == null ) {
    return Response.status( Response.Status.NOT_FOUND ).build();
  }
  KettleLogStore.discardLines( job.getLogChannelId(), true );
  CarteSingleton.getInstance().getJobMap().removeJob( entry );
  return Response.ok().build();
}
/**
 * Gets the logging hierarchy.
 *
 * <p>Builds one {@link LoggingHierarchy} entry per registered child of this
 * object's log channel; children with no registered logging object are skipped.
 *
 * @return the logging hierarchy
 */
public List<LoggingHierarchy> getLoggingHierarchy() {
  final String parentChannelId = getLogChannelId();
  final LoggingRegistry registry = LoggingRegistry.getInstance();
  List<LoggingHierarchy> result = new ArrayList<LoggingHierarchy>();
  for ( String childChannelId : registry.getLogChannelChildren( parentChannelId ) ) {
    LoggingObjectInterface child = registry.getLoggingObject( childChannelId );
    if ( child != null ) {
      result.add( new LoggingHierarchy( parentChannelId, batchId, child ) );
    }
  }
  return result;
}
/**
 * This test demonstrates the issue fixed in PDI-17398.
 * When a job is scheduled twice, it gets the same log channel Id and both logs get merged
 */
@Test
public void testTwoJobsGetSameLogChannelId() {
  Repository repo = mock( Repository.class );
  JobMeta jobMeta = mock( JobMeta.class );
  // Two Job instances built from identical metadata...
  Job first = new Job( repo, jobMeta );
  Job second = new Job( repo, jobMeta );
  // ...end up sharing one log channel id — the defect being demonstrated.
  assertEquals( first.getLogChannelId(), second.getLogChannelId() );
}
private void emergencyWriteJobTracker( Result res ) { JobEntryResult jerFinalResult = new JobEntryResult( res, this.getLogChannelId(), BaseMessages.getString( PKG, "Job.Comment.JobFinished" ), null, null, 0, null ); JobTracker finalTrack = new JobTracker( this.getJobMeta(), jerFinalResult ); // jobTracker is up to date too. this.jobTracker.addJobTracker( finalTrack ); }
/**
 * This test demonstrates the fix for PDI-17398.
 * Two schedules -> two Carte object Ids -> two log channel Ids
 */
@Test
public void testTwoJobsGetDifferentLogChannelIdWithDifferentCarteId() {
  Repository repo = mock( Repository.class );
  JobMeta firstMeta = mock( JobMeta.class );
  JobMeta secondMeta = mock( JobMeta.class );
  // Give each meta its own Carte container object id.
  doReturn( UUID.randomUUID().toString() ).when( firstMeta ).getContainerObjectId();
  doReturn( UUID.randomUUID().toString() ).when( secondMeta ).getContainerObjectId();
  Job first = new Job( repo, firstMeta );
  Job second = new Job( repo, secondMeta );
  // Distinct container ids must yield distinct log channel ids.
  assertNotEquals( first.getContainerObjectId(), second.getContainerObjectId() );
  assertNotEquals( first.getLogChannelId(), second.getLogChannelId() );
}
@Test
@PrepareForTest( { Encode.class } )
public void testRemoveJobServletEscapesHtmlWhenTransFound() throws ServletException, IOException {
  // Verifies the RemoveJobServlet HTML-escapes attacker-controlled strings:
  // a malicious value is fed through the request/job mocks and the rendered
  // H3 tag must not contain the raw markup.
  KettleLogStore.init();
  HttpServletRequest mockHttpServletRequest = mock( HttpServletRequest.class );
  HttpServletResponse mockHttpServletResponse = mock( HttpServletResponse.class );
  Job mockJob = mock( Job.class );
  JobMeta mockJobMeta = mock( JobMeta.class );
  LogChannelInterface mockLogChannelInterface = mock( LogChannelInterface.class );
  // NOTE(review): calling a setter on a Mockito mock is a no-op unless stubbed;
  // the malicious value presumably reaches the servlet via the getters stubbed below.
  mockJob.setName( ServletTestUtils.BAD_STRING_TO_TEST );
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter( out );
  // Spy the static Encode class so the escaping call can be verified below.
  PowerMockito.spy( Encode.class );
  when( mockHttpServletRequest.getContextPath() ).thenReturn( RemoveJobServlet.CONTEXT_PATH );
  when( mockHttpServletRequest.getParameter( anyString() ) ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockHttpServletResponse.getWriter() ).thenReturn( printWriter );
  when( mockJobMap.getJob( any( CarteObjectEntry.class ) ) ).thenReturn( mockJob );
  when( mockJob.getLogChannelId() ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockJob.getLogChannel() ).thenReturn( mockLogChannelInterface );
  when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
  when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );
  removeJobServlet.doGet( mockHttpServletRequest, mockHttpServletResponse );
  // The escaped output inside the H3 tag must carry no un-escaped bad text.
  assertFalse( ServletTestUtils.hasBadText( ServletTestUtils.getInsideOfTag( "H3", out.toString() ) ) );
  // And the escaping entry point itself must have been invoked.
  PowerMockito.verifyStatic( atLeastOnce() );
  Encode.forHtml( anyString() );
}
}
// Evict the job's cached status, free its buffered log lines (the 'true'
// flag also discards child channels), then drop it from the job map.
cache.remove( job.getLogChannelId() );
KettleLogStore.discardLines( job.getLogChannelId(), true );
getJobMap().removeJob( entry );
@Test
@PrepareForTest( { Encode.class } )
public void testStopJobServletEscapesHtmlWhenTransFound() throws ServletException, IOException {
  // Verifies the StopJobServlet HTML-escapes attacker-controlled strings:
  // a malicious value is fed through the request/job mocks and the rendered
  // H1 tag must not contain the raw markup.
  KettleLogStore.init();
  HttpServletRequest mockHttpServletRequest = mock( HttpServletRequest.class );
  HttpServletResponse mockHttpServletResponse = mock( HttpServletResponse.class );
  Job mockJob = mock( Job.class );
  JobMeta mockJobMeta = mock( JobMeta.class );
  LogChannelInterface mockLogChannelInterface = mock( LogChannelInterface.class );
  // NOTE(review): calling a setter on a Mockito mock is a no-op unless stubbed;
  // the malicious value presumably reaches the servlet via the getters stubbed below.
  mockJob.setName( ServletTestUtils.BAD_STRING_TO_TEST );
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter( out );
  // Spy the static Encode class so the escaping call can be verified below.
  PowerMockito.spy( Encode.class );
  when( mockHttpServletRequest.getContextPath() ).thenReturn( StopJobServlet.CONTEXT_PATH );
  when( mockHttpServletRequest.getParameter( anyString() ) ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockHttpServletResponse.getWriter() ).thenReturn( printWriter );
  when( mockJobMap.getJob( any( CarteObjectEntry.class ) ) ).thenReturn( mockJob );
  when( mockJob.getLogChannelId() ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockJob.getLogChannel() ).thenReturn( mockLogChannelInterface );
  when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
  when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );
  stopJobServlet.doGet( mockHttpServletRequest, mockHttpServletResponse );
  // The escaped output inside the H1 tag must carry no un-escaped bad text.
  assertFalse( ServletTestUtils.hasBadText( ServletTestUtils.getInsideOfTag( "H1", out.toString() ) ) );
  // And the escaping entry point itself must have been invoked.
  PowerMockito.verifyStatic( atLeastOnce() );
  Encode.forHtml( anyString() );
}
}
@Test
@PrepareForTest( { Encode.class } )
public void testStartJobServletEscapesHtmlWhenTransFound() throws ServletException, IOException {
  // Verifies the StartJobServlet HTML-escapes attacker-controlled strings:
  // a malicious value is fed through the request/job mocks and the rendered
  // H1 tag must not contain the raw markup.
  KettleLogStore.init();
  HttpServletRequest mockHttpServletRequest = mock( HttpServletRequest.class );
  HttpServletResponse mockHttpServletResponse = mock( HttpServletResponse.class );
  Job mockJob = mock( Job.class );
  JobMeta mockJobMeta = mock( JobMeta.class );
  LogChannelInterface mockLogChannelInterface = mock( LogChannelInterface.class );
  // NOTE(review): calling a setter on a Mockito mock is a no-op unless stubbed;
  // the malicious value presumably reaches the servlet via the getters stubbed below.
  mockJob.setName( ServletTestUtils.BAD_STRING_TO_TEST );
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter( out );
  // Spy the static Encode class so the escaping call can be verified below.
  PowerMockito.spy( Encode.class );
  when( mockHttpServletRequest.getContextPath() ).thenReturn( StartJobServlet.CONTEXT_PATH );
  when( mockHttpServletRequest.getParameter( anyString() ) ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockHttpServletResponse.getWriter() ).thenReturn( printWriter );
  when( mockJobMap.getJob( any( CarteObjectEntry.class ) ) ).thenReturn( mockJob );
  when( mockJob.getLogChannelId() ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockJob.getLogChannel() ).thenReturn( mockLogChannelInterface );
  when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
  when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );
  startJobServlet.doGet( mockHttpServletRequest, mockHttpServletResponse );
  // The escaped output inside the H1 tag must carry no un-escaped bad text.
  assertFalse( ServletTestUtils.hasBadText( ServletTestUtils.getInsideOfTag( "H1", out.toString() ) ) );
  // And the escaping entry point itself must have been invoked.
  PowerMockito.verifyStatic( atLeastOnce() );
  Encode.forHtml( anyString() );
}
}
// Stub the mocked job as a completed run: meta present, finished flag set,
// a known log channel id, and a canvas size for status-page rendering.
PowerMockito.when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
PowerMockito.when( mockJob.isFinished() ).thenReturn( true );
PowerMockito.when( mockJob.getLogChannelId() ).thenReturn( logId );
PowerMockito.when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );
when( mockJob.getStatus() ).thenReturn( "Finished" );
// Evict the job's cached status entry and free its buffered log lines;
// the 'true' flag also discards lines of child channels.
cache.remove( job.getLogChannelId() );
KettleLogStore.discardLines( job.getLogChannelId(), true );
// Snapshot the job's log buffer (boolean flag presumably excludes
// general/global lines — TODO confirm against KettleLogStore javadoc)
// and surface it in the exception message, preserving the cause.
String logging = KettleLogStore.getAppender().getBuffer( job.getLogChannelId(), false ).toString();
throw new KettleException( "Error executing job: " + logging, executionException );
// Redraws the metrics view for the current job (method truncated here).
private void refreshImage( GC canvasGc ) {
  // Collect every metrics duration recorded on this job's log channel.
  List<MetricsDuration> durations = MetricsUtil.getAllDurations( jobGraph.job.getLogChannelId() );
  // Nothing was measured — bail-out branch continues beyond this fragment.
  if ( Utils.isEmpty( durations ) ) {
// Detach the file-writer buffer bound to this job's log channel so the
// registry no longer holds it for the removed job.
String id = jobMap.getJob( entry ).getLogChannelId();
LoggingRegistry.getInstance().removeLogChannelFileWriterBuffer( id );
// Free the job's buffered log lines; 'true' also discards child channels.
KettleLogStore.discardLines( job.getLogChannelId(), true );
break;
case CHANNEL_ID:
  // Expose the job's log channel id as the field value.
  value = job.getLogChannelId();
  break;
case JOBNAME:
  // NOTE(review): no value is assigned for JOBNAME here — confirm this
  // fall-through-to-break is intentional and not a missing assignment.
  break;
case LOG_FIELD:
  // Fetch the (possibly size-limited) log text for the job's channel.
  value = getLogBuffer( job, job.getLogChannelId(), status, logSizeLimit );
  break;
case EXECUTING_SERVER:
// Append the executor job's captured log text and its channel id to the row.
String channelId = data.executorJob.getLogChannelId();
String logText = KettleLogStore.getAppender().getBuffer( channelId, false ).toString();
outputRow[idx++] = logText;
// NOTE(review): 'channelId' already holds this value — the second getter
// call is redundant and could be replaced by the local.
outputRow[idx++] = data.executorJob.getLogChannelId();
// Free the job's buffered log lines; 'true' also discards child channels.
KettleLogStore.discardLines( job.getLogChannelId(), true );