public void loadPersistentSessions() { // Rather use separate transactions for update and loading KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() { @Override public void run(KeycloakSession session) { sessionLoader.init(session); } }); KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() { @Override public void run(KeycloakSession session) { int count = sessionLoader.getSessionsCount(session); for (int i=0 ; i<count ; i+=sessionsPerSegment) { sessionLoader.loadSessions(session, i, sessionsPerSegment); } } }); } }
@Override public void loadPersistentSessions(final KeycloakSessionFactory sessionFactory, final int maxErrors, final int sessionsPerSegment) { log.debug("Start pre-loading userSessions from persistent storage"); KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() { @Override public void run(KeycloakSession session) { InfinispanConnectionProvider connections = session.getProvider(InfinispanConnectionProvider.class); Cache<String, Serializable> workCache = connections.getCache(InfinispanConnectionProvider.WORK_CACHE_NAME); InfinispanCacheInitializer ispnInitializer = new InfinispanCacheInitializer(sessionFactory, workCache, new OfflinePersistentUserSessionLoader(sessionsPerSegment), "offlineUserSessions", sessionsPerSegment, maxErrors); // DB-lock to ensure that persistent sessions are loaded from DB just on one DC. The other DCs will load them from remote cache. CacheInitializer initializer = new DBLockBasedCacheInitializer(session, ispnInitializer); initializer.initCache(); initializer.loadSessions(); // Initialize persister for periodically doing bulk DB updates of lastSessionRefresh timestamps of refreshed sessions persisterLastSessionRefreshStore = new PersisterLastSessionRefreshStoreFactory().createAndInit(session, true); } }); log.debug("Pre-loading userSessions from persistent storage finished"); }
@Override
public void exportRealm(KeycloakSessionFactory factory, final String realmName) throws IOException {
    logger.infof("Exporting realm '%s' into file %s", realmName, this.file.getAbsolutePath());

    KeycloakModelUtils.runJobInTransaction(factory, new ExportImportSessionTask() {

        @Override
        protected void runExportImportTask(KeycloakSession session) throws IOException {
            // Look the realm up inside the transaction, serialize it (users included) and write it out.
            RealmModel realmModel = session.realms().getRealmByName(realmName);
            writeToFile(ExportUtils.exportRealm(session, realmModel, true));
        }

    });
}
@Override
public void onEvent(ProviderEvent event) {
    // Cluster listeners can only be registered once the model migration has completed.
    if (!(event instanceof PostMigrationEvent)) {
        return;
    }
    KeycloakModelUtils.runJobInTransaction(factory, (KeycloakSession session) -> registerClusterListeners(session));
}
});
@Override
public void exportModel(KeycloakSessionFactory factory) throws IOException {
    logger.infof("Exporting model into file %s", this.file.getAbsolutePath());

    KeycloakModelUtils.runJobInTransaction(factory, new ExportImportSessionTask() {

        @Override
        protected void runExportImportTask(KeycloakSession session) throws IOException {
            // Export every known realm (users included) and persist them as one document.
            List<RealmModel> realms = session.realms().getRealms();
            List<RealmRepresentation> representations = new ArrayList<>(realms.size());
            for (RealmModel realmModel : realms) {
                representations.add(ExportUtils.exportRealm(session, realmModel, true));
            }
            writeToFile(representations);
        }

    });
}
@Override public void importRealm(KeycloakSessionFactory factory, final String realmName, final Strategy strategy) throws IOException { File realmFile = new File(this.rootDirectory + File.separator + realmName + "-realm.json"); File[] userFiles = this.rootDirectory.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.matches(realmName + "-users-[0-9]+\\.json"); } }); // Import realm first FileInputStream is = new FileInputStream(realmFile); final RealmRepresentation realmRep = JsonSerialization.readValue(is, RealmRepresentation.class); KeycloakModelUtils.runJobInTransaction(factory, new ExportImportSessionTask() { @Override public void runExportImportTask(KeycloakSession session) throws IOException { ImportUtils.importRealm(session, realmRep, strategy); } }); // Import users for (File userFile : userFiles) { final FileInputStream fis = new FileInputStream(userFile); KeycloakModelUtils.runJobInTransaction(factory, new ExportImportSessionTask() { @Override protected void runExportImportTask(KeycloakSession session) throws IOException { ImportUtils.importUsersFromStream(session, realmName, JsonSerialization.mapper, fis); } }); } }
@Override
public void importModel(KeycloakSessionFactory factory, final Strategy strategy) throws IOException {
    logger.infof("Full importing from file %s", this.file.getAbsolutePath());
    checkRealmReps();

    // Import all previously parsed realm representations within a single transaction.
    ExportImportSessionTask importTask = new ExportImportSessionTask() {
        @Override
        protected void runExportImportTask(KeycloakSession session) throws IOException {
            ImportUtils.importRealms(session, realmReps.values(), strategy);
        }
    };
    KeycloakModelUtils.runJobInTransaction(factory, importTask);
}
@Override
public void postInit(KeycloakSessionFactory factory) {
    LOG.debug("BeerResourceProviderFactory::postInit");

    /*
    How the data model gets bootstrapped depends on the deployment mode.

    Cold deployment (provider in the "deployments" subdirectory or installed as a
    JBoss module): postInit fires before the initial realm population/migration,
    so role initialization must wait for PostMigrationEvent.

    Hot deployment: no PostMigrationEvent will ever be delivered, so roles have to
    be initialized immediately on the current thread.

    NB: hot deployment is NOT yet supported for EntityProviders! Thus, hot deploying
    BeerCloak will result in exceptions and non-working code; this code only
    demonstrates a correct postInit implementation for all deployment modes.
    See https://issues.jboss.org/browse/KEYCLOAK-5782 for more.
    */
    if (isHotDeploying()) {
        LOG.debug("Hot (re)deploy, using current thread");
        KeycloakModelUtils.runJobInTransaction(factory, this::initRoles);
    } else {
        LOG.debug("Server startup, waiting for PostMigrationEvent");
    }

    factory.register((ProviderEvent event) -> {
        if (event instanceof RealmModel.RealmPostCreateEvent) {
            realmPostCreate((RealmModel.RealmPostCreateEvent) event);
        } else if (event instanceof PostMigrationEvent) {
            KeycloakModelUtils.runJobInTransaction(factory, this::initRoles);
        }
    });
}
private void loadSessionsFromRemoteCache(final KeycloakSessionFactory sessionFactory, String cacheName, final int sessionsPerSegment, final int maxErrors) {
    log.debugf("Check pre-loading sessions from remote cache '%s'", cacheName);

    KeycloakModelUtils.runJobInTransaction(sessionFactory, (KeycloakSession session) -> {
        InfinispanConnectionProvider ispnProvider = session.getProvider(InfinispanConnectionProvider.class);
        Cache<String, Serializable> work = ispnProvider.getCache(InfinispanConnectionProvider.WORK_CACHE_NAME);

        // Pull the sessions over from the named remote cache, segment by segment.
        InfinispanCacheInitializer cacheInitializer = new InfinispanCacheInitializer(sessionFactory, work,
                new RemoteCacheSessionsLoader(cacheName, sessionsPerSegment), "remoteCacheLoad::" + cacheName, sessionsPerSegment, maxErrors);

        cacheInitializer.initCache();
        cacheInitializer.loadSessions();
    });

    log.debugf("Pre-loading sessions from remote cache '%s' finished", cacheName);
}
private InitializerState getOrCreateInitializerState() { InitializerState state = (InitializerState) cache.get(stateKey); if (state == null) { final int[] count = new int[1]; // Rather use separate transactions for update and counting KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() { @Override public void run(KeycloakSession session) { sessionLoader.init(session); } }); KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() { @Override public void run(KeycloakSession session) { count[0] = sessionLoader.getSessionsCount(session); } }); state = new InitializerState(); state.init(count[0], sessionsPerSegment); saveStateToCache(state); } return state; }
@Override
public void loadPersistentSessions(final KeycloakSessionFactory sessionFactory, final int maxErrors, final int sessionsPerSegment) {
    log.debug("Start pre-loading userSessions and clientSessions from persistent storage");

    if (compatMode) {
        // Compat storage: load everything directly, no Infinispan coordination involved.
        SimpleUserSessionInitializer simpleInitializer =
                new SimpleUserSessionInitializer(sessionFactory, new OfflineUserSessionLoader(), sessionsPerSegment);
        simpleInitializer.loadPersistentSessions();
    } else {
        KeycloakModelUtils.runJobInTransaction(sessionFactory, (KeycloakSession session) -> {
            InfinispanConnectionProvider ispnProvider = session.getProvider(InfinispanConnectionProvider.class);
            Cache<String, SessionEntity> offlineCache = ispnProvider.getCache(InfinispanConnectionProvider.OFFLINE_SESSION_CACHE_NAME);

            InfinispanUserSessionInitializer ispnInitializer = new InfinispanUserSessionInitializer(sessionFactory, offlineCache,
                    new OfflineUserSessionLoader(), maxErrors, sessionsPerSegment, "offlineUserSessions");

            ispnInitializer.initCache();
            ispnInitializer.loadPersistentSessions();
        });
    }

    log.debug("Pre-loading userSessions and clientSessions from persistent storage finished");
}
@Override public void postInit(final KeycloakSessionFactory factory) { KeycloakModelUtils.runJobInTransaction(factory, new KeycloakSessionTask() { @Override public void run(KeycloakSession session) { compatMode = isCompatMode(session); if (compatMode) { compatProviderFactory = new MemUserSessionProviderFactory(); } } }); // Max count of worker errors. Initialization will end with exception when this number is reached final int maxErrors = config.getInt("maxErrors", 20); // Count of sessions to be computed in each segment final int sessionsPerSegment = config.getInt("sessionsPerSegment", 100); factory.register(new ProviderEventListener() { @Override public void onEvent(ProviderEvent event) { if (event instanceof PostMigrationEvent) { loadPersistentSessions(factory, maxErrors, sessionsPerSegment); } } }); }
@Override
public SessionLoader.WorkerResult call() throws Exception {
    if (log.isTraceEnabled()) {
        log.tracef("Running computation for segment: %s", workerCtx.toString());
    }

    KeycloakSessionFactory sessionFactory = workCache.getAdvancedCache().getComponentRegistry().getComponent(KeycloakSessionFactory.class);
    if (sessionFactory == null) {
        // Cache not wired to a session factory yet — report a failed result instead of working.
        log.debugf("KeycloakSessionFactory not yet set in cache. Worker skipped");
        return sessionLoader.createFailedWorkerResult(loaderCtx, workerCtx);
    }

    // Single-element array so the lambda can hand its result back to this thread.
    SessionLoader.WorkerResult[] resultHolder = new SessionLoader.WorkerResult[1];
    KeycloakModelUtils.runJobInTransaction(sessionFactory, (KeycloakSession session) -> {
        resultHolder[0] = sessionLoader.loadSessions(session, loaderCtx, workerCtx);
    });
    return resultHolder[0];
}
@Override
public void exportModel(KeycloakSessionFactory factory) throws IOException {
    // Fetch the realm list in one transaction; each realm is then exported in its own transaction.
    final RealmsHolder holder = new RealmsHolder();
    KeycloakModelUtils.runJobInTransaction(factory, (KeycloakSession session) -> {
        holder.realms = session.realms().getRealms();
    });

    for (RealmModel realm : holder.realms) {
        exportRealmImpl(factory, realm.getName());
    }
    writeVersion("version.json", VersionRepresentation.SINGLETON);
}
private void update(KeycloakSession session) {
    MigrationStrategy strategy = getMigrationStrategy();

    MongoUpdaterProvider mongoUpdater = session.getProvider(MongoUpdaterProvider.class);
    if (mongoUpdater == null) {
        throw new RuntimeException("Can't update database: Mongo updater provider not found");
    }

    DBLockProvider dbLock = new DBLockManager(session).getDBLock();
    if (dbLock.hasLock()) {
        // Lock is already held by this thread — update directly.
        updateOrValidateDB(strategy, session, mongoUpdater);
        return;
    }

    logger.trace("Don't have DBLock retrieved before upgrade. Needs to acquire lock first in separate transaction");
    KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession lockSession) -> {
        DBLockProvider lock = new DBLockManager(lockSession).getDBLock();
        lock.waitForLock();
        try {
            // Note: the update runs against the ORIGINAL session; the lock session only guards it.
            updateOrValidateDB(strategy, session, mongoUpdater);
        } finally {
            lock.releaseLock();
        }
    });
}
@Override
public InfinispanUserSessionInitializer.WorkerResult call() throws Exception {
    if (log.isTraceEnabled()) {
        log.tracef("Running computation for segment: %d", segment);
    }

    KeycloakSessionFactory sessionFactory = cache.getAdvancedCache().getComponentRegistry().getComponent(KeycloakSessionFactory.class);
    if (sessionFactory == null) {
        // Cache not wired to a session factory yet — report an unsuccessful result.
        log.warnf("KeycloakSessionFactory not yet set in cache. Worker skipped");
        return InfinispanUserSessionInitializer.WorkerResult.create(segment, false);
    }

    // This worker covers the half-open range starting at segment * sessionsPerSegment.
    final int firstSession = segment * sessionsPerSegment;
    final int maxSessions = sessionsPerSegment;
    KeycloakModelUtils.runJobInTransaction(sessionFactory, (KeycloakSession session) -> {
        sessionLoader.loadSessions(session, firstSession, maxSessions);
    });
    return InfinispanUserSessionInitializer.WorkerResult.create(segment, true);
}
protected void export(Connection connection, String schema, File databaseUpdateFile, KeycloakSession session, JpaUpdaterProvider updater) {
    DBLockProvider dbLock = new DBLockManager(session).getDBLock();
    if (dbLock.hasLock()) {
        // Lock is already ours — export right away.
        updater.export(connection, schema, databaseUpdateFile);
        return;
    }

    // Acquire the DB lock in a dedicated transaction, export, then always release it.
    KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession lockSession) -> {
        DBLockProvider lock = new DBLockManager(lockSession).getDBLock();
        lock.waitForLock();
        try {
            updater.export(connection, schema, databaseUpdateFile);
        } finally {
            lock.releaseLock();
        }
    });
}
protected void update(Connection connection, String schema, KeycloakSession session, JpaUpdaterProvider updater) {
    DBLockProvider dbLock = new DBLockManager(session).getDBLock();
    if (dbLock.hasLock()) {
        // Lock is already ours — run the schema update right away.
        updater.update(connection, schema);
        return;
    }

    // Acquire the DB lock in a dedicated transaction, update, then always release it.
    KeycloakModelUtils.runJobInTransaction(session.getKeycloakSessionFactory(), (KeycloakSession lockSession) -> {
        DBLockProvider lock = new DBLockManager(lockSession).getDBLock();
        lock.waitForLock();
        try {
            updater.update(connection, schema);
        } finally {
            lock.releaseLock();
        }
    });
}
@Override
public void eventReceived(ClusterEvent event) {
    KeycloakModelUtils.runJobInTransaction(sessionFactory, (KeycloakSession session) -> {
        InfinispanUserSessionProvider sessionProvider = (InfinispanUserSessionProvider) session.getProvider(UserSessionProvider.class,
                InfinispanUserSessionProviderFactory.PROVIDER_ID);
        SE sessionEvent = (SE) event;

        // Decide up front whether this event has to be propagated to other data centers.
        boolean resend = shouldResendEvent(session, sessionEvent);
        if (log.isDebugEnabled()) {
            log.debugf("Received user session event '%s'. Should resend event: %b", sessionEvent.toString(), resend);
        }

        // Apply the event locally...
        eventReceived(session, sessionProvider, sessionEvent);

        // ...then forward it to every DC except the local one when required.
        if (resend) {
            session.getProvider(ClusterProvider.class).notify(sessionEvent.getEventKey(), event, true, ClusterProvider.DCNotify.ALL_BUT_LOCAL_DC);
        }
    });
}
@Override
public void eventReceived(ClusterEvent event) {
    KeycloakModelUtils.runJobInTransaction(sessionFactory, (KeycloakSession session) -> {
        InfinispanAuthenticationSessionProvider authSessionProvider = (InfinispanAuthenticationSessionProvider) session.getProvider(
                AuthenticationSessionProvider.class, InfinispanAuthenticationSessionProviderFactory.PROVIDER_ID);
        SE sessionEvent = (SE) event;

        // Bail out while the underlying cache cannot accept invocations (e.g. starting/stopping).
        if (!authSessionProvider.getCache().getStatus().allowInvocations()) {
            log.debugf("Cache in state '%s' doesn't allow invocations", authSessionProvider.getCache().getStatus());
            return;
        }

        log.debugf("Received authentication session event '%s'", sessionEvent.toString());
        eventReceived(session, authSessionProvider, sessionEvent);
    });
}