@Override
public Iterable<DiscoveryNode> discoverNodes() {
    List<DiscoveryNode> discoveryNodes = new ArrayList<DiscoveryNode>();
    try {
        Iterable<? extends NodeMetadata> nodes = computeServiceBuilder.getFilteredNodes();
        for (NodeMetadata metadata : nodes) {
            if (metadata.getStatus() != NodeMetadata.Status.RUNNING) {
                continue;
            }
            discoveryNodes.add(buildDiscoveredNode(metadata));
        }
        if (discoveryNodes.isEmpty()) {
            LOGGER.warning("No running nodes discovered in configured cloud provider.");
        } else {
            StringBuilder sb = new StringBuilder("Discovered the following nodes with public IPs:\n");
            for (DiscoveryNode node : discoveryNodes) {
                sb.append(" ").append(node.getPublicAddress().toString()).append("\n");
            }
            LOGGER.finest(sb.toString());
        }
    } catch (Exception e) {
        throw new HazelcastException("Failed to get registered addresses", e);
    }
    return discoveryNodes;
}
@Test(expected = HazelcastException.class)
public void whenInvalidAddress_thenHazelcastException() {
    HashSet<String> privateAddresses = new HashSet<String>();
    // invalid address
    privateAddresses.add("257.0.0.1");

    Set<NodeMetadata> nodes = new HashSet<NodeMetadata>();
    nodes.add(new NodeMetadataImpl("", "", "dummyId", null, null, new HashMap<String, String>(),
            new HashSet<String>(), null, null, null, null, NodeMetadata.Status.RUNNING, "",
            STARTING_PORT, privateAddresses, privateAddresses, null, "dummyHostName"));

    ComputeServiceBuilder mockComputeServiceBuilder = mock(ComputeServiceBuilder.class);
    doReturn(nodes).when(mockComputeServiceBuilder).getFilteredNodes();

    JCloudsDiscoveryStrategy jCloudsDiscoveryStrategy = new JCloudsDiscoveryStrategy(mockComputeServiceBuilder);

    jCloudsDiscoveryStrategy.discoverNodes();
}
public static String urlEncode(String string) {
    String encoded;
    try {
        encoded = URLEncoder.encode(string, "UTF-8").replace("+", "%20");
    } catch (UnsupportedEncodingException e) {
        throw new HazelcastException(e);
    }
    return encoded;
}
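// Illustrative usage only (not part of the original sources): URLEncoder.encode() applies the
// application/x-www-form-urlencoded rules and turns a space into '+', so the extra
// replace("+", "%20") is what makes the result usable inside a URL path or query segment.
// Assumes urlEncode() is statically imported from the helper class defined above.
String encoded = urlEncode("key with spaces & symbols");
// encoded -> "key%20with%20spaces%20%26%20symbols"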
@Override
public boolean afterInsert(final Object key, final Object value, final Object version) throws CacheException {
    try {
        return cache.insert(key, value, version);
    } catch (HazelcastException e) {
        if (log.isFinestEnabled()) {
            log.finest("Could not insert into Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        }
        return false;
    }
}
/**
 * {@inheritDoc}
 * <p/>
 * Called after <code>com.hazelcast.ReadWriteAccessDelegate.lockItem()</code>
 */
@Override
public boolean afterUpdate(final Object key, final Object value, final Object currentVersion,
                           final Object previousVersion, final SoftLock lock) throws CacheException {
    try {
        return cache.update(key, value, currentVersion, lock);
    } catch (HazelcastException e) {
        if (log.isFinestEnabled()) {
            log.finest("Could not update Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        }
        return false;
    }
}
@Override
public void remove(final Object key) throws CacheException {
    try {
        cache.remove(key);
    } catch (HazelcastException e) {
        throw new CacheException("Operation timeout during remove operation from cache!", e);
    }
}
@Override
public Object get(final Object key, final long txTimestamp) throws CacheException {
    try {
        return cache.get(key, txTimestamp);
    } catch (HazelcastException e) {
        if (log.isFinestEnabled()) {
            log.finest("Could not read from Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        }
        return null;
    }
}
@Override
public boolean putFromLoad(final Object key, final Object value, final long txTimestamp,
                           final Object version) throws CacheException {
    try {
        return cache.put(key, value, txTimestamp, version);
    } catch (HazelcastException e) {
        if (log.isFinestEnabled()) {
            log.finest("Could not put into Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        }
        return false;
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Called after <code>com.hazelcast.ReadWriteAccessDelegate.lockItem()</code>
 * </p>
 */
@Override
public boolean afterUpdate(final Object key, final Object value, final Object currentVersion,
                           final Object previousVersion, final SoftLock lock) throws CacheException {
    try {
        return cache.update(key, value, currentVersion, lock);
    } catch (HazelcastException e) {
        if (log.isFinestEnabled()) {
            log.finest("Could not update Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        }
        return false;
    }
}
protected boolean put(final Object key, final Object value, final Object currentVersion) {
    try {
        return cache.put(key, value, currentVersion);
    } catch (HazelcastException e) {
        LOG.log(Level.FINEST, "Could not put into Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        return false;
    }
}
protected boolean update(final Object key, final Object value, final Object currentVersion,
                         final Object previousVersion, final SoftLock lock) {
    try {
        return cache.update(key, value, currentVersion, previousVersion, lock);
    } catch (HazelcastException e) {
        LOG.log(Level.FINEST, "Could not update Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        return false;
    }
}
public Object get(final Object key, final long txTimestamp) throws CacheException {
    try {
        return cache.get(key);
    } catch (HazelcastException e) {
        LOG.log(Level.FINEST, "Could not read from Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
        return null;
    }
}
public void remove(final Object key) throws CacheException {
    try {
        cache.remove(key);
    } catch (HazelcastException e) {
        throw new CacheException("Operation timeout during remove operation from cache!", e);
    }
}
public static void throwUncheckedException(Throwable t) {
    if (t instanceof Error) {
        if (t instanceof OutOfMemoryError) {
            OutOfMemoryErrorDispatcher.onOutOfMemory((OutOfMemoryError) t);
        }
        throw (Error) t;
    } else if (t instanceof RuntimeException) {
        throw (RuntimeException) t;
    } else {
        throw new HazelcastException(t);
    }
}
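// Illustrative usage only (not part of the original sources): a typical call site rethrows a
// caught Throwable through the helper above. Errors are rethrown as-is (after notifying
// OutOfMemoryErrorDispatcher for OOMEs), RuntimeExceptions are rethrown unchanged, and any
// other Throwable is wrapped in a HazelcastException. someCheckedOperation() is hypothetical.
try {
    someCheckedOperation();
} catch (Throwable t) {
    throwUncheckedException(t); // never returns normally
}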
@Test
public void testAfterInsert() {
    when(cache.insert(any(), any(), any())).thenThrow(new HazelcastException("expected exception"));
    assertFalse(delegate.afterInsert(null, null, null));
}
@Test
public void testAfterUpdate() {
    when(cache.update(any(), any(), any(), any(SoftLock.class))).thenThrow(new HazelcastException("expected exception"));
    assertFalse(delegate.afterUpdate(null, null, null, null, null));
}