Full-featured IMDG
JCache Provider
Apache 2 License
Small JAR with Minimal Dependencies
Embedded or Client/Server
API with Multiple Language Clients
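The samples below show the same data structures and serialization hooks in several forms: first as an embedded Java Cluster Member (package member), then as a Java client of an already running cluster (package client), and finally as Node.js and Python client equivalents. As a quick orientation for the "Embedded or Client/Server" point above, here is a minimal sketch that combines the two Java modes in one JVM. It only reuses calls that appear in the samples below, assumes the hazelcast and hazelcast-client JARs are on the classpath, and the package and class names are illustrative.
package overview;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
public class EmbeddedOrClientServerSample {
    public static void main(String[] args) {
        // Embedded mode: this JVM joins the cluster as a full Member.
        HazelcastInstance member = Hazelcast.newHazelcastInstance();
        // Client/Server mode: a lightweight client connects to the running cluster (127.0.0.1 by default).
        HazelcastInstance client = HazelcastClient.newHazelcastClient();
        // Both handles expose the same distributed API, here the same Distributed Map.
        IMap<String, String> viaClient = client.getMap("my-distributed-map");
        IMap<String, String> viaMember = member.getMap("my-distributed-map");
        viaClient.put("key", "value");
        System.out.println("read on member: " + viaMember.get("key"));
        // Shutdown the Client first, then the Member.
        client.shutdown();
        member.shutdown();
    }
}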
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicLong;
import com.hazelcast.core.IFunction;
public class AtomicLongSample {
public static class MultiplyByTwo implements IFunction<Long, Long> {
@Override
public Long apply(Long input) {
return input * 2;
}
}
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get an Atomic Counter, we'll call it "counter"
IAtomicLong counter = hz.getAtomicLong("counter");
// Add and Get the "counter"
counter.addAndGet(3);
// value is now 3
// Multiply the "counter" by passing it an IFunction
counter.alter(new MultiplyByTwo());
//value is now 6
// Display the "counter" value
System.out.println("counter: " + counter.get());
// Shutdown this Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.config.Config;
import com.hazelcast.config.SerializerConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.StreamSerializer;
import java.io.IOException;
public class CustomSerializerSample {
static class CustomSerializable {
public String value;
CustomSerializable(String value) {
this.value = value;
}
}
static class CustomSerializer implements StreamSerializer<CustomSerializable> {
@Override
public int getTypeId() {
return 10;
}
@Override
public void destroy() {
}
@Override
public void write(ObjectDataOutput out, CustomSerializable object) throws IOException {
byte[] bytes = object.value.getBytes("utf8");
out.writeInt(bytes.length);
out.write(bytes);
}
@Override
public CustomSerializable read(ObjectDataInput in) throws IOException {
int len = in.readInt();
byte[] bytes = new byte[len];
in.readFully(bytes);
return new CustomSerializable(new String(bytes, "utf8"));
}
}
public static void main(String[] args) {
Config config = new Config();
config.getSerializationConfig().addSerializerConfig(new SerializerConfig()
.setImplementation(new CustomSerializer())
.setTypeClass(CustomSerializable.class));
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
//CustomSerializer will serialize/deserialize CustomSerializable objects
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.map.AbstractEntryProcessor;
import java.util.Map;
public class EntryProcessorSample {
public static class IncEntryProcessor extends AbstractEntryProcessor<String, Integer> {
@Override
public Object process(Map.Entry<String, Integer> entry) {
// Get the value passed
int oldValue = entry.getValue();
// Update the value
int newValue = oldValue + 1;
// Update the value back to the entry stored in the Hazelcast Member this EntryProcessor is running on.
entry.setValue(newValue);
// No need to return anything back to the caller
return null;
}
}
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member. No Config object is passed so using defaults.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get the Distributed Map from Cluster.
IMap<String, Integer> map = hz.getMap("my-distributed-map");
// Put the integer value of 0 into the Distributed Map
map.put("key", 0);
// Run the IncEntryProcessor class on the Hazelcast Cluster Member holding the key called "key"
map.executeOnKey("key", new IncEntryProcessor());
// Show that the IncEntryProcessor updated the value.
System.out.println("new value:" + map.get("key"));
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IExecutorService;
import com.hazelcast.core.Member;
import java.io.Serializable;
public class ExecutorServiceSample {
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz1 = Hazelcast.newHazelcastInstance();
// Start a Second Embedded Hazelcast Cluster Member
HazelcastInstance hz2 = Hazelcast.newHazelcastInstance();
// Get the Distributed Executor Service
IExecutorService ex = hz1.getExecutorService("my-distributed-executor");
// Submit the MessagePrinter Runnable to a random Hazelcast Cluster Member
ex.submit(new MessagePrinter("message to any node"));
// Get the first Hazelcast Cluster Member
Member firstMember = hz1.getCluster().getMembers().iterator().next();
// Submit the MessagePrinter Runnable to the first Hazelcast Cluster Member
ex.executeOnMember(new MessagePrinter("message to very first member of the cluster"), firstMember);
// Submit the MessagePrinter Runnable to all Hazelcast Cluster Members
ex.executeOnAllMembers(new MessagePrinter("message to all members in the cluster"));
// Submit the MessagePrinter Runnable to the Hazelcast Cluster Member owning the key called "key"
ex.executeOnKeyOwner(new MessagePrinter("message to the member that owns the following key"), "key");
// Shutdown this Hazelcast Cluster Member
hz1.shutdown();
// Shutdown this Hazelcast Cluster Member
hz2.shutdown();
}
static class MessagePrinter implements Runnable, Serializable {
final String message;
MessagePrinter(String message) {
this.message = message;
}
@Override
public void run() {
System.out.println(message);
}
}
}
package member;
import com.hazelcast.config.Config;
import com.hazelcast.config.GlobalSerializerConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.StreamSerializer;
import java.io.IOException;
public class GlobalSerializerSample {
static class GlobalSerializer implements StreamSerializer<Object> {
@Override
public int getTypeId() {
return 20;
}
@Override
public void destroy() {
}
@Override
public void write(ObjectDataOutput out, Object object) throws IOException {
// out.write(MyFavoriteSerializer.serialize(object))
}
@Override
public Object read(ObjectDataInput in) throws IOException {
// return MyFavoriteSerializer.deserialize(in);
return null;
}
}
public static void main(String[] args) {
Config config = new Config();
config.getSerializationConfig().setGlobalSerializerConfig(
new GlobalSerializerConfig().setImplementation(new GlobalSerializer())
);
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
//GlobalSerializer will serialize/deserialize all non-builtin types
hz.shutdown();
}
}
package member;
import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.DataSerializableFactory;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import java.io.IOException;
public class IdentifiedDataSerializableSample {
public static class Employee implements IdentifiedDataSerializable {
private static final int CLASS_ID = 100;
public int id;
public String name;
@Override
public void readData(ObjectDataInput in) throws IOException {
id = in.readInt();
name = in.readUTF();
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeInt(id);
out.writeUTF(name);
}
@Override
public int getFactoryId() {
return SampleDataSerializableFactory.FACTORY_ID;
}
@Override
public int getId() {
return CLASS_ID;
}
}
public static class SampleDataSerializableFactory implements DataSerializableFactory {
public static final int FACTORY_ID = 1000;
@Override
public IdentifiedDataSerializable create(int typeId) {
if (typeId == 100) {
return new Employee();
}
return null;
}
}
public static void main(String[] args) {
Config config = new Config();
config.getSerializationConfig()
.addDataSerializableFactory(SampleDataSerializableFactory.FACTORY_ID,
new SampleDataSerializableFactory());
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
//Employee can be used here
hz.shutdown();
}
}
package member;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.expiry.AccessedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ExpiryPolicy;
import com.hazelcast.cache.ICache;
public class JCacheSample {
public static void main(String[] args) {
// Run as a Hazelcast Member
System.setProperty("hazelcast.jcache.provider.type", "server");
// Create the JCache CacheManager
CacheManager manager = Caching.getCachingProvider().getCacheManager();
MutableConfiguration<String, String> configuration = new MutableConfiguration<String, String>();
// Expire entries after 1 minute
configuration.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_MINUTE));
// Get a Cache called "myCache" and configure with 1 minute expiry
Cache<String, String> myCache = manager.createCache("myCache", configuration);
// Put and Get a value from "myCache"
myCache.put("key", "value");
String value = myCache.get("key");
System.out.println(value);
//ICache is a Hazelcast interface that extends JCache and provides more functionality
ICache<String, String> icache = myCache.unwrap(ICache.class);
//Async Get and Put using ICache interface
icache.getAsync("key");
icache.putAsync("key", "value");
//ICache allows custom expiry per cache entry
final ExpiryPolicy customExpiryPolicy = AccessedExpiryPolicy.factoryOf(Duration.TEN_MINUTES).create();
icache.put("key", "newValue", customExpiryPolicy);
//Size of the Cache should reflect the ICache and JCache operations
icache.size();
//Shutdown the underlying Hazelcast Cluster Member
manager.getCachingProvider().close();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import java.util.List;
public class ListSample {
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get the Distributed List from Cluster.
List<String> list = hz.getList("my-distributed-list");
// Add elements to the list
list.add("item1");
list.add("item2");
// Remove the first element
System.out.println("Removed: " + list.remove(0));
// There is only one element left
System.out.println("Current size is " + list.size());
// Clear the list
list.clear();
// Shutdown this Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import java.util.concurrent.locks.Lock;
public class LockSample {
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get a distributed lock called "my-distributed-lock"
Lock lock = hz.getLock("my-distributed-lock");
// Acquire the lock and execute some guarded code.
lock.lock();
try {
//do something here
} finally {
lock.unlock();
}
// Shutdown this Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
public class MapSample {
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get the Distributed Map from Cluster.
IMap<String, String> map = hz.getMap("my-distributed-map");
//Standard Put and Get.
map.put("key", "value");
map.get("key");
//Concurrent Map methods, optimistic updating
map.putIfAbsent("somekey", "somevalue");
map.replace("key", "value", "newvalue");
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.MultiMap;
import java.util.Collection;
public class MultiMapSample {
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get the Distributed MultiMap from Cluster.
MultiMap<String, String> multiMap = hz.getMultiMap("my-distributed-multimap");
// Put values in the map against the same key
multiMap.put("my-key", "value1");
multiMap.put("my-key", "value2");
multiMap.put("my-key", "value3");
// Print out all the values associated with the key "my-key"
Collection<String> values = multiMap.get("my-key");
System.out.println(values);
// remove specific key/value pair
multiMap.remove("my-key", "value2");
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableFactory;
import com.hazelcast.nio.serialization.PortableReader;
import com.hazelcast.nio.serialization.PortableWriter;
import java.io.IOException;
import java.util.Date;
public class PortableSerializableSample {
public static class Customer implements Portable {
public static final int CLASS_ID = 1;
public String name;
public int id;
public Date lastOrder;
@Override
public int getFactoryId() {
return SamplePortableFactory.FACTORY_ID;
}
@Override
public int getClassId() {
return CLASS_ID;
}
@Override
public void writePortable(PortableWriter writer) throws IOException {
writer.writeInt("id", id);
writer.writeUTF("name", name);
writer.writeLong("lastOrder", lastOrder.getTime());
}
@Override
public void readPortable(PortableReader reader) throws IOException {
id = reader.readInt("id");
name = reader.readUTF("name");
lastOrder = new Date(reader.readLong("lastOrder"));
}
}
public static class SamplePortableFactory implements PortableFactory {
public static final int FACTORY_ID = 1;
@Override
public Portable create(int classId) {
if (classId == Customer.CLASS_ID) {
return new Customer();
}
return null;
}
}
public static void main(String[] args) {
Config config = new Config();
config.getSerializationConfig()
.addPortableFactory(SamplePortableFactory.FACTORY_ID, new SamplePortableFactory());
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
//Customer can be used here
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.query.Predicate;
import com.hazelcast.query.Predicates;
import com.hazelcast.query.SqlPredicate;
import java.io.Serializable;
import java.util.Collection;
public class QuerySample {
public static class User implements Serializable {
String username;
int age;
boolean active;
public User(String username, int age, boolean active) {
this.username = username;
this.age = age;
this.active = active;
}
@Override
public String toString() {
return "User{"
+ "username='" + username + '\''
+ ", age=" + age
+ ", active=" + active
+ '}';
}
}
private static void generateUsers(IMap<String, User> users) {
users.put("Rod", new User("Rod", 19, true));
users.put("Jane", new User("Jane", 20, true));
users.put("Freddy", new User("Freddy", 23, true));
}
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get a Distributed Map called "users"
IMap<String, User> users = hz.getMap("users");
// Add some users to the Distributed Map
generateUsers(users);
// Create a Predicate from a String (a SQL like Where clause)
Predicate sqlQuery = new SqlPredicate("active AND age BETWEEN 18 AND 21");
// Creating the same Predicate as above but with a builder
Predicate criteriaQuery = Predicates.and(
Predicates.equal("active", true),
Predicates.between("age", 18, 21)
);
// Get result collections using the two different Predicates
Collection<User> result1 = users.values(sqlQuery);
Collection<User> result2 = users.values(criteriaQuery);
// Print out the results
System.out.println(result1);
System.out.println(result2);
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
public class QueueSample {
public static void main(String[] args) throws InterruptedException {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get a Blocking Queue called "my-distributed-queue"
BlockingQueue<String> queue = hz.getQueue("my-distributed-queue");
// Offer a String into the Distributed Queue
queue.offer("item");
// Poll the Distributed Queue and return the String
queue.poll();
//Timed blocking Operations
queue.offer("anotheritem", 500, TimeUnit.MILLISECONDS);
queue.poll(5, TimeUnit.SECONDS);
//Indefinitely blocking Operations
queue.put("yetanotheritem");
System.out.println(queue.take());
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ReplicatedMap;
public class ReplicatedMapSample {
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get a Replicated Map called "my-replicated-map"
ReplicatedMap<String, String> map = hz.getReplicatedMap("my-replicated-map");
// Put and Get a value from the Replicated Map
// key/value replicated to all members
map.put("key", "value");
// the value is retrieved from the local member
map.get("key");
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.ringbuffer.Ringbuffer;
public class RingbufferSample {
public static void main(String[] args) throws InterruptedException {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
Ringbuffer<Long> rb = hz.getRingbuffer("rb");
// add two items into ring buffer
rb.add(100L);
rb.add(200L);
// we start from the oldest item.
// if you want to start from the next item, call rb.tailSequence()+1
long sequence = rb.headSequence();
System.out.println(rb.readOne(sequence));
sequence++;
System.out.println(rb.readOne(sequence));
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import java.util.Set;
public class SetSample {
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get the Distributed Set from Cluster.
Set<String> set = hz.getSet("my-distributed-set");
// Add items to the set with duplicates
set.add("item1");
set.add("item1");
set.add("item2");
set.add("item2");
set.add("item2");
set.add("item3");
// Get the items. Note that there are no duplicates.
for (String item: set) {
System.out.println(item);
}
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package member;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ITopic;
import com.hazelcast.core.Message;
import com.hazelcast.core.MessageListener;
public class TopicSample implements MessageListener<String> {
@Override
public void onMessage(Message<String> message) {
System.out.println("Got message " + message.getMessageObject());
}
public static void main(String[] args) {
// Start the Embedded Hazelcast Cluster Member.
HazelcastInstance hz = Hazelcast.newHazelcastInstance();
// Get a Topic called "my-distributed-topic"
ITopic<String> topic = hz.getTopic("my-distributed-topic");
// Add a Listener to the Topic
topic.addMessageListener(new TopicSample());
// Publish a message to the Topic
topic.publish("Hello to distributed world");
// Shutdown the Hazelcast Cluster Member
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicLong;
public class AtomicLongSample {
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get an Atomic Counter, we'll call it "counter"
IAtomicLong counter = hz.getAtomicLong("counter");
// Add and Get the "counter"
counter.addAndGet(3);
// value is now 3
// Display the "counter" value
System.out.println("counter: " + counter.get());
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.config.SerializerConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.StreamSerializer;
import java.io.IOException;
public class CustomSerializerSample {
static class CustomSerializable {
public String value;
CustomSerializable(String value) {
this.value = value;
}
}
static class CustomSerializer implements StreamSerializer<CustomSerializable> {
@Override
public int getTypeId() {
return 10;
}
@Override
public void destroy() {
}
@Override
public void write(ObjectDataOutput out, CustomSerializable object) throws IOException {
byte[] bytes = object.value.getBytes("utf8");
out.writeInt(bytes.length);
out.write(bytes);
}
@Override
public CustomSerializable read(ObjectDataInput in) throws IOException {
int len = in.readInt();
byte[] bytes = new byte[len];
in.readFully(bytes);
return new CustomSerializable(new String(bytes, "utf8"));
}
}
public static void main(String[] args) {
ClientConfig clientConfig = new ClientConfig();
clientConfig.getSerializationConfig().addSerializerConfig(new SerializerConfig()
.setImplementation(new CustomSerializer())
.setTypeClass(CustomSerializable.class));
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient(clientConfig);
//CustomSerializer will serialize/deserialize CustomSerializable objects
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ClientUserCodeDeploymentConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.map.AbstractEntryProcessor;
import java.io.Serializable;
import java.util.Map;
public class EntryProcessorSample {
public static class IncEntryProcessor extends AbstractEntryProcessor<String, Integer> implements Serializable {
@Override
public Object process(Map.Entry<String, Integer> entry) {
// Get the value passed
int oldValue = entry.getValue();
// Update the value
int newValue = oldValue + 1;
// Update the value back to the entry stored in the Hazelcast Member this EntryProcessor is running on.
entry.setValue(newValue);
// Nothing needs to be returned to the caller, so return null
return null;
}
}
public static void main(String[] args) {
// Enable Code Deployment from this Client's classpath to the Cluster Members' classpath
// User Code Deployment needs to be enabled on the Cluster Members as well.
ClientConfig config = new ClientConfig();
ClientUserCodeDeploymentConfig userCodeDeploymentConfig = config.getUserCodeDeploymentConfig();
userCodeDeploymentConfig.setEnabled(true);
userCodeDeploymentConfig.addClass(EntryProcessorSample.IncEntryProcessor.class);
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient(config);
// Get the Distributed Map from Cluster.
IMap<String, Integer> map = hz.getMap("my-distributed-map");
// Put the integer value of 0 into the Distributed Map
map.put("key", 0);
// Run the IncEntryProcessor class on the Hazelcast Cluster Member holding the key called "key"
map.executeOnKey("key", new IncEntryProcessor());
// Show that the IncEntryProcessor updated the value.
System.out.println("new value:" + map.get("key"));
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ClientUserCodeDeploymentConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IExecutorService;
import com.hazelcast.core.Member;
import java.io.Serializable;
public class ExecutorServiceSample {
static class MessagePrinter implements Serializable, Runnable {
public String message;
MessagePrinter(String message) {
this.message = message;
}
@Override
public void run() {
System.out.println(message);
}
}
public static void main(String[] args) {
// Enable Code Deployment from this Client's classpath to the Cluster Members' classpath
// User Code Deployment needs to be enabled on the Cluster Members as well.
ClientConfig config = new ClientConfig();
ClientUserCodeDeploymentConfig userCodeDeploymentConfig = config.getUserCodeDeploymentConfig();
userCodeDeploymentConfig.setEnabled(true);
userCodeDeploymentConfig.addClass(ExecutorServiceSample.MessagePrinter.class);
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient(config);
// Get the Distributed Executor Service
IExecutorService ex = hz.getExecutorService("my-distributed-executor");
// Submit the MessagePrinter Runnable to a random Hazelcast Cluster Member
ex.submit(new MessagePrinter("message to any node"));
// Get the first Hazelcast Cluster Member
Member firstMember = hz.getCluster().getMembers().iterator().next();
// Submit the MessagePrinter Runnable to the first Hazelcast Cluster Member
ex.executeOnMember(new MessagePrinter("message to very first member of the cluster"), firstMember);
// Submit the MessagePrinter Runnable to all Hazelcast Cluster Members
ex.executeOnAllMembers(new MessagePrinter("message to all members in the cluster"));
// Submit the MessagePrinter Runnable to the Hazelcast Cluster Member owning the key called "key"
ex.executeOnKeyOwner(new MessagePrinter("message to the member that owns the following key"), "key");
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.config.GlobalSerializerConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.StreamSerializer;
import java.io.IOException;
public class GlobalSerializerSample {
static class GlobalSerializer implements StreamSerializer<Object> {
@Override
public int getTypeId() {
return 20;
}
@Override
public void destroy() {
}
@Override
public void write(ObjectDataOutput out, Object object) throws IOException {
// out.write(MyFavoriteSerializer.serialize(object))
}
@Override
public Object read(ObjectDataInput in) throws IOException {
// return MyFavoriteSerializer.deserialize(in);
return null;
}
}
public static void main(String[] args) {
ClientConfig clientConfig = new ClientConfig();
clientConfig.getSerializationConfig().setGlobalSerializerConfig(
new GlobalSerializerConfig().setImplementation(new GlobalSerializer())
);
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient(clientConfig);
//GlobalSerializer will serialize/deserialize all non-builtin types
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.DataSerializableFactory;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import java.io.IOException;
public class IdentifiedDataSerializableSample {
public static class Employee implements IdentifiedDataSerializable {
private static final int CLASS_ID = 100;
public int id;
public String name;
@Override
public void readData(ObjectDataInput in) throws IOException {
id = in.readInt();
name = in.readUTF();
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeInt(id);
out.writeUTF(name);
}
@Override
public int getFactoryId() {
return SampleDataSerializableFactory.FACTORY_ID;
}
@Override
public int getId() {
return CLASS_ID;
}
}
public static class SampleDataSerializableFactory implements DataSerializableFactory {
public static final int FACTORY_ID = 1000;
@Override
public IdentifiedDataSerializable create(int typeId) {
if (typeId == 100) {
return new Employee();
}
return null;
}
}
public static void main(String[] args) {
ClientConfig clientConfig = new ClientConfig();
clientConfig.getSerializationConfig()
.addDataSerializableFactory(SampleDataSerializableFactory.FACTORY_ID,
new SampleDataSerializableFactory());
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient(clientConfig);
//Employee can be used here
hz.shutdown();
}
}
package client;
import com.hazelcast.cache.ICache;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.expiry.AccessedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.expiry.ExpiryPolicy;
public class JCacheSample {
public static void main(String[] args) {
// Run as a Hazelcast Client
System.setProperty("hazelcast.jcache.provider.type", "client");
// Create the JCache CacheManager
CacheManager manager = Caching.getCachingProvider().getCacheManager();
MutableConfiguration<String, String> configuration = new MutableConfiguration<String, String>();
// Expire entries after 1 minute
configuration.setExpiryPolicyFactory(AccessedExpiryPolicy.factoryOf(Duration.ONE_MINUTE));
// Get a Cache called "myCache" and configure with 1 minute expiry
Cache<String, String> myCache = manager.createCache("myCache", configuration);
// Put and Get a value from "myCache"
myCache.put("key", "value");
String value = myCache.get("key");
System.out.println(value);
//ICache is a Hazelcast interface that extends JCache and provides more functionality
ICache<String, String> icache = myCache.unwrap(ICache.class);
//Async Get and Put using ICache interface
icache.getAsync("key");
icache.putAsync("key", "value");
//ICache allows custom expiry per cache entry
final ExpiryPolicy customExpiryPolicy = AccessedExpiryPolicy.factoryOf(Duration.TEN_MINUTES).create();
icache.put("key", "newValue", customExpiryPolicy);
//Size of the Cache should reflect the ICache and JCache operations
icache.size();
//Shutdown this Hazelcast Client
manager.getCachingProvider().close();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import java.util.List;
public class ListSample {
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get the Distributed List from Cluster.
List<String> list = hz.getList("my-distributed-list");
// Add elements to the list
list.add("item1");
list.add("item2");
// Remove the first element
System.out.println("Removed: " + list.remove(0));
// There is only one element left
System.out.println("Current size is " + list.size());
// Clear the list
list.clear();
// Shutdown this Hazelcast client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import java.util.concurrent.locks.Lock;
public class LockSample {
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get a distributed lock called "my-distributed-lock"
Lock lock = hz.getLock("my-distributed-lock");
// Acquire the lock and execute some guarded code.
lock.lock();
try {
//do something here
} finally {
lock.unlock();
}
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
public class MapSample {
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get the Distributed Map from Cluster.
IMap<String, String> map = hz.getMap("my-distributed-map");
//Standard Put and Get.
map.put("key", "value");
map.get("key");
//Concurrent Map methods, optimistic updating
map.putIfAbsent("somekey", "somevalue");
map.replace("key", "value", "newvalue");
// Shutdown this Hazelcast client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.MultiMap;
import java.util.Collection;
public class MultiMapSample {
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get the Distributed MultiMap from Cluster.
MultiMap<String, String> multiMap = hz.getMultiMap("my-distributed-multimap");
// Put values in the map against the same key
multiMap.put("my-key", "value1");
multiMap.put("my-key", "value2");
multiMap.put("my-key", "value3");
// Print out all the values associated with the key "my-key"
Collection<String> values = multiMap.get("my-key");
System.out.println(values);
// remove specific key/value pair
multiMap.remove("my-key", "value2");
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableFactory;
import com.hazelcast.nio.serialization.PortableReader;
import com.hazelcast.nio.serialization.PortableWriter;
import java.io.IOException;
import java.util.Date;
public class PortableSerializableSample {
public static class Customer implements Portable {
public static final int CLASS_ID = 1;
public String name;
public int id;
public Date lastOrder;
@Override
public int getFactoryId() {
return SamplePortableFactory.FACTORY_ID;
}
@Override
public int getClassId() {
return CLASS_ID;
}
@Override
public void writePortable(PortableWriter writer) throws IOException {
writer.writeInt("id", id);
writer.writeUTF("name", name);
writer.writeLong("lastOrder", lastOrder.getTime());
}
@Override
public void readPortable(PortableReader reader) throws IOException {
id = reader.readInt("id");
name = reader.readUTF("name");
lastOrder = new Date(reader.readLong("lastOrder"));
}
}
public static class SamplePortableFactory implements PortableFactory {
public static final int FACTORY_ID = 1;
@Override
public Portable create(int classId) {
if (classId == Customer.CLASS_ID) {
return new Customer();
}
return null;
}
}
public static void main(String[] args) {
ClientConfig clientConfig = new ClientConfig();
clientConfig.getSerializationConfig()
.addPortableFactory(SamplePortableFactory.FACTORY_ID, new SamplePortableFactory());
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient(clientConfig);
//Customer can be used here
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableFactory;
import com.hazelcast.nio.serialization.PortableReader;
import com.hazelcast.nio.serialization.PortableWriter;
import com.hazelcast.query.Predicate;
import com.hazelcast.query.Predicates;
import com.hazelcast.query.SqlPredicate;
import java.io.IOException;
import java.util.Collection;
public class QuerySample {
public static class User implements Portable {
public static final int CLASS_ID = 1;
public String username;
public int age;
public boolean active;
public User(String username, int age, boolean active) {
this.username = username;
this.age = age;
this.active = active;
}
public User() {
}
@Override
public String toString() {
return "User{"
+ "username='" + username + '\''
+ ", age=" + age
+ ", active=" + active
+ '}';
}
@Override
public int getFactoryId() {
return ThePortableFactory.FACTORY_ID;
}
@Override
public int getClassId() {
return CLASS_ID;
}
@Override
public void writePortable(PortableWriter writer) throws IOException {
writer.writeUTF("username", username);
writer.writeInt("age", age);
writer.writeBoolean("active", active);
}
@Override
public void readPortable(PortableReader reader) throws IOException {
username = reader.readUTF("username");
age = reader.readInt("age");
active = reader.readBoolean("active");
}
}
public static class ThePortableFactory implements PortableFactory {
public static final int FACTORY_ID = 1;
@Override
public Portable create(int classId) {
if (classId == User.CLASS_ID) {
return new User();
}
return null;
}
}
private static void generateUsers(IMap<String, User> users) {
users.put("Rod", new User("Rod", 19, true));
users.put("Jane", new User("Jane", 20, true));
users.put("Freddy", new User("Freddy", 23, true));
}
public static void main(String[] args) {
ClientConfig clientConfig = new ClientConfig();
clientConfig.getSerializationConfig()
.addPortableFactory(ThePortableFactory.FACTORY_ID, new ThePortableFactory());
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient(clientConfig);
// Get a Distributed Map called "users"
IMap<String, User> users = hz.getMap("users");
// Add some users to the Distributed Map
generateUsers(users);
// Create a Predicate from a String (a SQL like Where clause)
Predicate sqlQuery = new SqlPredicate("active AND age BETWEEN 18 AND 21");
// Creating the same Predicate as above but with a builder
Predicate criteriaQuery = Predicates.and(
Predicates.equal("active", true),
Predicates.between("age", 18, 21)
);
// Get result collections using the two different Predicates
Collection<User> result1 = users.values(sqlQuery);
Collection<User> result2 = users.values(criteriaQuery);
// Print out the results
System.out.println(result1);
System.out.println(result2);
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
public class QueueSample {
public static void main(String[] args) throws InterruptedException {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get a Blocking Queue called "my-distributed-queue"
BlockingQueue<String> queue = hz.getQueue("my-distributed-queue");
// Offer a String into the Distributed Queue
queue.offer("item");
// Poll the Distributed Queue and return the String
queue.poll();
//Timed blocking Operations
queue.offer("anotheritem", 500, TimeUnit.MILLISECONDS);
queue.poll(5, TimeUnit.SECONDS);
//Indefinitely blocking Operations
queue.put("yetanotheritem");
System.out.println(queue.take());
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ReplicatedMap;
public class ReplicatedMapSample {
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get a Replicated Map called "my-replicated-map"
ReplicatedMap<String, String> map = hz.getReplicatedMap("my-replicated-map");
// Put and Get a value from the Replicated Map
String replacedValue = map.put("key", "value");
// key/value replicated to all members
System.out.println("replacedValue = " + replacedValue);
// Will be null as it is the first update
String value = map.get("key");
// the value is retrieved from a random member in the cluster
System.out.println("value for key = " + value);
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.ringbuffer.Ringbuffer;
public class RingBufferSample {
public static void main(String[] args) throws Exception {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
Ringbuffer<Long> rb = hz.getRingbuffer("rb");
// add two items into ring buffer
rb.add(100L);
rb.add(200L);
// we start from the oldest item.
// if you want to start from the next item, call rb.tailSequence()+1
long sequence = rb.headSequence();
System.out.println(rb.readOne(sequence));
sequence++;
System.out.println(rb.readOne(sequence));
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import java.util.Set;
public class SetSample {
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get the Distributed Set from Cluster.
Set<String> set = hz.getSet("my-distributed-set");
// Add items to the set with duplicates
set.add("item1");
set.add("item1");
set.add("item2");
set.add("item2");
set.add("item2");
set.add("item3");
// Get the items. Note that there are no duplicates.
for (String item: set) {
System.out.println(item);
}
// Shutdown this Hazelcast client
hz.shutdown();
}
}
package client;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ITopic;
import com.hazelcast.core.Message;
import com.hazelcast.core.MessageListener;
public class TopicSample implements MessageListener<String> {
@Override
public void onMessage(Message<String> message) {
System.out.println("Got message " + message.getMessageObject());
}
public static void main(String[] args) {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
HazelcastInstance hz = HazelcastClient.newHazelcastClient();
// Get a Topic called "my-distributed-topic"
ITopic<String> topic = hz.getTopic("my-distributed-topic");
// Add a Listener to the Topic
topic.addMessageListener(new TopicSample());
// Publish a message to the Topic
topic.publish("Hello to distributed world");
// Shutdown this Hazelcast Client
hz.shutdown();
}
}
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get an Atomic Counter, we'll call it "counter"
var counter = hz.getAtomicLong("counter");
// Add and Get the "counter"
return counter.addAndGet(3).then(function (value) {
return counter.get();
}).then(function (value) {
// Display the "counter" value
console.log("counter: " + value);
// Shutdown this Hazelcast Client
hz.shutdown();
});
});
var Client = require('hazelcast-client').Client;
var Config = require('hazelcast-client').Config;
function CustomSerializable(value) {
this.value = value;
}
CustomSerializable.prototype.hzGetCustomId = function () {
return 10;
};
function CustomSerializer() {
//Constructor function
}
CustomSerializer.prototype.getId = function () {
return 10;
};
CustomSerializer.prototype.write = function (output, t) {
output.writeInt(t.value.length);
for (var i = 0; i < t.value.length; i++) {
output.writeInt(t.value.charCodeAt(i));
}
};
CustomSerializer.prototype.read = function(reader) {
var len = reader.readInt();
var str = '';
for (var i = 0; i < len; i++) {
str = str + String.fromCharCode(reader.readInt());
}
return new CustomSerializable(str);
};
var cfg = new Config.ClientConfig();
cfg.serializationConfig.customSerializers.push(new CustomSerializer());
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient(cfg).then(function (hz) {
//CustomSerializer will serialize/deserialize CustomSerializable objects
hz.shutdown();
});
var Client = require('hazelcast-client').Client;
var Config = require('hazelcast-client').Config;
function IdentifiedEntryProcessor(value) {
// Constructor function
}
IdentifiedEntryProcessor.prototype.readData = function (inp) {
};
IdentifiedEntryProcessor.prototype.writeData = function(outp) {
};
IdentifiedEntryProcessor.prototype.getFactoryId = function () {
return 1;
};
IdentifiedEntryProcessor.prototype.getClassId = function() {
return 9;
};
function EntryProcessorDataSerializableFactory() {
}
EntryProcessorDataSerializableFactory.prototype.create = function (type) {
if (type === 9) {
return new IdentifiedEntryProcessor();
}
return null;
};
var cfg = new Config.ClientConfig();
cfg.serializationConfig.dataSerializableFactories[1] = new EntryProcessorDataSerializableFactory();
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient(cfg).then(function (hz) {
// Get the Distributed Map from Cluster.
var map = hz.getMap('my-distributed-map');
// Put the double value of 0 into the Distributed Map
return map.put('key', 0).then(function () {
// Run the IdentifiedEntryProcessor class on the Hazelcast Cluster Member holding the key called "key"
return map.executeOnKey('key', new IdentifiedEntryProcessor());
}).then(function () {
// Show that the IdentifiedEntryProcessor updated the value.
return map.get('key');
}).then(function (value) {
console.log(value);
// Shutdown this Hazelcast Client
hz.shutdown();
})
});
var Client = require('hazelcast-client').Client;
var Config = require('hazelcast-client').Config;
var cfg = new Config.ClientConfig();
function GlobalSerializer() {
// Constructor function
}
GlobalSerializer.prototype.getId = function () {
return 20;
};
GlobalSerializer.prototype.read = function (input) {
// return MyFavoriteSerializer.deserialize(input);
};
GlobalSerializer.prototype.write = function (output, obj) {
// output.write(MyFavoriteSerializer.serialize(obj))
};
cfg.serializationConfig.globalSerializer = new GlobalSerializer();
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient(cfg).then(function (hz) {
// GlobalSerializer will serialize/deserialize all non-builtin types
hz.shutdown();
});
var Client = require('hazelcast-client').Client;
var Config = require('hazelcast-client').Config;
function Employee(id, name) {
this.id = id;
this.name = name;
}
Employee.prototype.readData = function (input) {
this.id = input.readInt();
this.name = input.readUTF();
};
Employee.prototype.writeData = function(output) {
output.writeInt(this.id);
output.writeUTF(this.name);
};
Employee.prototype.getFactoryId = function () {
return 1000;
};
Employee.prototype.getClassId = function() {
return 100;
};
function SampleDataSerializableFactory() {
// Constructor function
}
SampleDataSerializableFactory.prototype.create = function (type) {
if (type === 100) {
return new Employee();
}
return null;
};
var cfg = new Config.ClientConfig();
cfg.serializationConfig.dataSerializableFactories[1000] = new SampleDataSerializableFactory();
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient(cfg).then(function (hz) {
// Employee can be used here
hz.shutdown();
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get the Distributed List from Cluster.
var list = hz.getList('my-distributed-list');
// Add elements to the list
return list.add('item1').then(function () {
return list.add('item2');
}).then(function () {
//Remove the first element
return list.removeAt(0);
}).then(function (value) {
console.log(value);
// There is only one element left
return list.size();
}).then(function (len) {
console.log(len);
// Clear the list
return list.clear();
}).then(function () {
// Shutdown this Hazelcast client
hz.shutdown();
});
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get a distributed lock called "my-distributed-lock"
var lock = hz.getLock("my-distributed-lock");
// Acquire the lock and execute some guarded code.
return lock.lock().then(function () {
//do something here
}).finally(function () {
return lock.unlock();
}).then(function () {
// Shutdown this Hazelcast Client
hz.shutdown();
});
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get the Distributed Map from Cluster.
var map = hz.getMap('my-distributed-map');
//Standard Put and Get.
return map.put('key', 'value').then(function () {
return map.get('key');
}).then(function (val) {
//Concurrent Map methods, optimistic updating
return map.putIfAbsent('somekey', 'somevalue');
}).then(function () {
return map.replace('key', 'value', 'newvalue');
}).then(function (value) {
// Shutdown this Hazelcast client
hz.shutdown();
});
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get the Distributed MultiMap from Cluster.
var multiMap = hz.getMultiMap('my-distributed-multimap');
// Put values in the map against the same key
return multiMap.put('my-key', 'value1').then(function () {
return multiMap.put('my-key', 'value2');
}).then(function () {
return multiMap.put('my-key', 'value3');
}).then(function () {
// Print out all the values associated with the key "my-key"
return multiMap.get('my-key')
}).then(function (values) {
for (var value of values) {
console.log(value);
}
// remove specific key/value pair
return multiMap.remove('my-key', 'value2');
}).then(function () {
// Shutdown this Hazelcast client
hz.shutdown();
});
});
var Client = require('hazelcast-client').Client;
var Config = require('hazelcast-client').Config;
var Long = require('long');
function Customer(name, id, lastOrder) {
this.name = name;
this.id = id;
this.lastOrder = lastOrder;
}
Customer.prototype.readPortable = function (reader) {
this.name = reader.readUTF('name');
this.id = reader.readInt('id');
this.lastOrder = reader.readLong('lastOrder').toNumber();
};
Customer.prototype.writePortable = function(writer) {
writer.writeUTF('name', this.name);
writer.writeInt('id', this.id);
writer.writeLong('lastOrder', Long.fromNumber(this.lastOrder));
};
Customer.prototype.getFactoryId = function () {
return 1;
};
Customer.prototype.getClassId = function() {
return 1;
};
function PortableFactory() {
// Constructor function
}
PortableFactory.prototype.create = function (classId) {
if (classId === 1) {
return new Customer();
}
return null;
};
var cfg = new Config.ClientConfig();
cfg.serializationConfig.portableFactories[1] = new PortableFactory();
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient(cfg).then(function (hz) {
//Customer can be used here
hz.shutdown();
});
var Client = require('hazelcast-client').Client;
var Predicates = require('hazelcast-client').Predicates;
var Config = require('hazelcast-client').Config;
function User(username, age, active) {
this.username = username;
this.age = age;
this.active = active;
}
User.prototype.readPortable = function (reader) {
this.username = reader.readUTF('username');
this.age = reader.readInt('age');
this.active = reader.readBoolean('active');
};
User.prototype.writePortable = function (writer) {
writer.writeUTF('username', this.username);
writer.writeInt('age', this.age);
writer.writeBoolean('active', this.active);
};
User.prototype.getFactoryId = function () {
return 1;
};
User.prototype.getClassId = function () {
return 1;
};
function PortableFactory() {
// Constructor sample
}
PortableFactory.prototype.create = function (classId) {
if (classId === 1) {
return new User();
}
return null;
};
function generateUsers(users) {
return users.put('Rod', new User('Rod', 19, true)).then(function () {
return users.put('Jane', new User('Jane', 20, true));
}).then(function () {
return users.put('Freddy', new User('Freddy', 23, true));
});
}
var cfg = new Config.ClientConfig();
cfg.serializationConfig.portableFactories[1] = new PortableFactory();
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient(cfg).then(function (hz) {
// Get a Distributed Map called "users"
var users = hz.getMap('users');
// Add some users to the Distributed Map
return generateUsers(users).then(function () {
// Create a Predicate
var criteriaQuery = Predicates.and(
Predicates.truePredicate('active', true),
Predicates.isBetween('age', 18, 21)
);
// Get result collections using the Predicate
return users.valuesWithPredicate(criteriaQuery);
}).then(function (values) {
// Print out the results
console.log(values.toArray());
// Shutdown this Hazelcast Client
hz.shutdown();
})
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get a Blocking Queue called "my-distributed-queue"
var queue = hz.getQueue('my-distributed-queue');
// Offer a String into the Distributed Queue
return queue.offer('item').then(function () {
// Poll the Distributed Queue and return the String
return queue.poll();
}).then(function () {
//Timed blocking Operations
return queue.offer('anotheritem', 500);
}).then(function () {
return queue.poll(5000);
}).then(function () {
//Indefinitely blocking Operations
return queue.put('yetanotheritem');
}).then(function () {
return queue.take();
}).then(function (value) {
console.log(value);
// Shutdown this Hazelcast Client
hz.shutdown();
})
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get a Replicated Map called "my-replicated-map"
var map = hz.getReplicatedMap('my-replicated-map');
// Put and Get a value from the Replicated Map
// key/value replicated to all members
return map.put('key', 'value').then(function (replacedValue) {
console.log('replaced value = ' + replacedValue); // Will be null as it is the first update
return map.get('key');
}).then(function (value) {
// the value is retrieved from a random member in the cluster
console.log('value for key = ' + value);
// Shutdown this Hazelcast Client
hz.shutdown();
});
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
var rb = hz.getRingbuffer('rb');
return rb.add(100).then(function () {
return rb.add(200);
}).then(function (value) {
// we start from the oldest item.
// if you want to start from the next item, call rb.tailSequence()+1
return rb.headSequence();
}).then(function (sequence) {
return rb.readOne(sequence).then(function (value) {
console.log(value);
return rb.readOne(sequence.add(1));
}).then(function (value) {
console.log(value);
// Shutdown this Hazelcast Client
hz.shutdown();
});
});
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get the Distributed Set from Cluster.
var set = hz.getSet('my-distributed-set');
// Add items to the set with duplicates
return set.add('item1').then(function () {
return set.add('item1');
}).then(function () {
return set.add('item2');
}).then(function () {
return set.add('item2');
}).then(function () {
return set.add('item2');
}).then(function () {
return set.add('item3');
}).then(function () {
// Get the items. Note that there are no duplicates
return set.toArray();
}).then(function (values) {
console.log(values);
}).then(function () {
// Shutdown this Hazelcast client
hz.shutdown();
});
});
var Client = require('hazelcast-client').Client;
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
Client.newHazelcastClient().then(function (hz) {
// Get a Topic called "my-distributed-topic"
var topic = hz.getReliableTopic("my-distributed-topic");
// Add a Listener to the Topic
topic.addMessageListener(function (message) {
console.log(message);
// Shutdown this Hazelcast Client
hz.shutdown();
});
// Publish a message to the Topic
topic.publish('Hello to distributed world');
});
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get an Atomic Counter, we'll call it "counter"
counter = hz.get_atomic_long("counter")
# Add and Get the "counter"
counter.add_and_get(3).result() # value is 3
# Display the "counter" value
print("counter: ", counter.get().result())
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
from hazelcast import ClientConfig
from hazelcast.serialization.api import StreamSerializer
class CustomSerializableType(object):
def __init__(self, value=None):
self.value = value
class CustomSerializer(StreamSerializer):
def write(self, out, obj):
data = obj.value.encode("utf-8")
out.write_int(len(data))
out.write_from(data)
def read(self, inp):
length = inp.read_int()
result = bytearray(length)
inp.read_into(result, 0, length)
return CustomSerializableType(result.decode("utf-8"))
def get_type_id(self):
return 10
def destroy(self):
pass
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
config = ClientConfig()
config.serialization_config.set_custom_serializer(CustomSerializableType, CustomSerializer)
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient(config)
# CustomSerializer will serialize/deserialize CustomSerializable objects
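# Illustrative usage sketch (not part of the original sample; the map name "custom-map" is an arbitrary choice):
# CustomSerializableType values are now handled by CustomSerializer on the way to and from the cluster.
custom_map = hz.get_map("custom-map")
custom_map.put("key", CustomSerializableType("some value")).result()
print("read back value:", custom_map.get("key").result().value)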
hz.shutdown()
import hazelcast
import logging
from hazelcast.serialization.api import IdentifiedDataSerializable
class IncEntryProcessor(IdentifiedDataSerializable):
FACTORY_ID = 66
CLASS_ID = 1
def read_data(self, object_data_input):
pass
def write_data(self, object_data_output):
pass
def get_factory_id(self):
return self.FACTORY_ID
def get_class_id(self):
return self.CLASS_ID
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get the Distributed Map from Cluster.
map = hz.get_map("my-distributed-map")
# Put the integer value of 0 into the Distributed Map
map.put("key", 0).result()
# Run the IncEntryProcessor class on the Hazelcast Cluster Member holding the key called "key"
map.execute_on_key("key", IncEntryProcessor()).result()
# Show that the IncEntryProcessor updated the value.
print("new value:", map.get("key").result())
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
from hazelcast.serialization.api import Portable
class MessagePrinter(Portable):
FACTORY_ID = 1
CLASS_ID = 9
def __init__(self, message=None):
self.message = message
def write_portable(self, writer):
writer.write_utf("message", self.message)
def read_portable(self, reader):
self.message = reader.read_utf("message")
def get_factory_id(self):
return self.FACTORY_ID
def get_class_id(self):
return self.CLASS_ID
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get the Distributed Executor Service
ex = hz.get_executor("my-distributed-executor")
# Get the first Hazelcast Cluster Member
firstMember = hz.cluster.members[0]
# Submit the MessagePrinter Runnable to the first Hazelcast Cluster Member
ex.execute_on_member(firstMember, MessagePrinter("message to very first member of the cluster"))
# Submit the MessagePrinter Runnable to all Hazelcast Cluster Members
ex.execute_on_all_members(MessagePrinter("message to all members in the cluster"))
# Submit the MessagePrinter Runnable to the Hazelcast Cluster Member owning the key called "key"
ex.execute_on_key_owner("key", MessagePrinter("message to the member that owns the following key"))
# Shutdown this Hazelcast Client
hz.shutdown()
from __future__ import print_function
import hazelcast
import logging
from hazelcast import ClientConfig
from hazelcast.serialization.api import StreamSerializer
class GlobalSerializer(StreamSerializer):
def write(self, out, obj):
# out.write_byte_array(MyFavoriteSerializer.serialize(obj))
pass
def read(self, inp):
# return MyFavoriteSerializer.deserialize(inp)
return None
def get_type_id(self):
return 20
def destroy(self):
pass
class CustomSerializableType(object):
def __init__(self, value=None):
self.value = value
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
config = ClientConfig()
config.serialization_config.global_serializer = GlobalSerializer
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient(config)
# GlobalSerializer will serialize/deserialize all non-builtin types
# Shutdown this Hazelcast Client
hz.shutdown()
from __future__ import print_function
import hazelcast
import logging
from hazelcast import ClientConfig
from hazelcast.serialization.api import IdentifiedDataSerializable
class Employee(IdentifiedDataSerializable):
FACTORY_ID = 1000
CLASS_ID = 100
def __init__(self, id=None, name=None):
self.id = id
self.name = name
def read_data(self, object_data_input):
self.id = object_data_input.read_int()
self.name = object_data_input.read_utf()
def write_data(self, object_data_output):
object_data_output.write_int(self.id)
object_data_output.write_utf(self.name)
def get_factory_id(self):
return Employee.FACTORY_ID
def get_class_id(self):
return Employee.CLASS_ID
def __repr__(self):
return '%s %s' % (self.id, self.name)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
config = ClientConfig()
my_factory = {Employee.CLASS_ID: Employee}
config.serialization_config.add_data_serializable_factory(Employee.FACTORY_ID, my_factory)
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient(config)
# Employee can be used here
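# Illustrative usage sketch (not part of the original sample; the map name "employees" is an arbitrary choice):
# the registered factory lets Employee objects be stored and read back transparently.
employees = hz.get_map("employees")
employees.put("emp-1", Employee(1, "John")).result()
print("read back employee:", employees.get("emp-1").result())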
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get the Distributed List from Cluster.
mlist = hz.get_list("my-distributed-list")
# Add element to the list
mlist.add("item1")
mlist.add("item2")
# Remove the first element
print("Removed: ", mlist.remove_at(0).result())
# There is only one element left
print("Current size is ", mlist.size().result())
# Clear the list
mlist.clear()
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get a distributed lock called "my-distributed-lock"
lock = hz.get_lock("my-distributed-lock")
# Now create a lock and execute some guarded code.
lock.lock()
try:
# do something here
pass
finally:
lock.unlock()
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get the Distributed Map from Cluster.
map = hz.get_map("my-distributed-map")
# Standard Put and Get
map.put("key", "value")
map.get("key")
# Concurrent Map methods, optimistic updating
map.put_if_absent("somekey", "somevalue")
map.replace_if_same("key", "value", "newvalue")
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get the Distributed MultiMap from Cluster.
multiMap = hz.get_multi_map("my-distributed-multimap")
# Put values in the map against the same key
multiMap.put("my-key", "value1")
multiMap.put("my-key", "value2")
multiMap.put("my-key", "value3")
# Print out all the values associated with the key called "my-key"
values = multiMap.get("my-key").result()
print(values)
# remove specific key/value pair
multiMap.remove("my-key", "value2")
# Shutdown this Hazelcast Client
hz.shutdown()
from __future__ import print_function
import hazelcast
import logging
from hazelcast import ClientConfig
from hazelcast.serialization.api import Portable
class Customer(Portable):
FACTORY_ID = 1
CLASS_ID = 1
def __init__(self, id=None, name=None, last_order=None):
self.id = id
self.name = name
self.last_order = last_order
def read_portable(self, object_data_input):
self.id = object_data_input.read_int("id")
self.name = object_data_input.read_utf("name")
self.last_order = object_data_input.read_long("last_order")
def write_portable(self, object_data_output):
object_data_output.write_int("id", self.id)
object_data_output.write_utf("name", self.name)
object_data_output.write_long("last_order", self.last_order)
def get_factory_id(self):
return Customer.FACTORY_ID
def get_class_id(self):
return Customer.CLASS_ID
def __repr__(self):
return '%s %s %s' % (self.id, self.name, self.last_order)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
config = ClientConfig()
my_factory = {Customer.CLASS_ID: Customer}
config.serialization_config.add_portable_factory(Customer.FACTORY_ID, my_factory)
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient(config)
# Customer can be used here
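# Illustrative usage sketch (not part of the original sample; the map name "customers" is an arbitrary choice):
# the registered Portable factory handles serialization of Customer transparently.
customers = hz.get_map("customers")
customers.put("cust-1", Customer(1, "Jane", 1)).result()
print("read back customer:", customers.get("cust-1").result())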
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
from hazelcast import ClientConfig
from hazelcast.serialization.api import Portable
from hazelcast.serialization.predicate import SqlPredicate, and_, is_between, is_equal_to
class User(Portable):
FACTORY_ID = 1
CLASS_ID = 1
def __init__(self, user_name=None, age=None, active=None):
self.user_name = user_name
self.age = age
self.active = active
def write_portable(self, writer):
writer.write_utf("user_name", self.user_name)
writer.write_int("age", self.age)
writer.write_boolean("active", self.active)
def read_portable(self, reader):
self.user_name = reader.read_utf("user_name")
self.age = reader.read_int("age")
self.active = reader.read_boolean("active")
def get_factory_id(self):
return self.FACTORY_ID
def get_class_id(self):
return self.CLASS_ID
def __repr__(self):
return '%s %s %s' % (self.user_name, self.age, self.active)
def generate_users(users):
users.put("Rod", User("Rod", 19, True))
users.put("Jane", User("Jane", 20, True))
users.put("Freddy", User("Freddy", 23, True))
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
config = ClientConfig()
portable_factory = {User.CLASS_ID: User}
config.serialization_config.add_portable_factory(User.FACTORY_ID, portable_factory)
hz = hazelcast.HazelcastClient(config)
# Get a Distributed Map called "users"
users = hz.get_map("users")
# Add some users to the Distributed Map
generate_users(users)
# Create a Predicate from a String (a SQL like Where clause)
sqlQuery = SqlPredicate("active AND age BETWEEN 18 AND 21")
# Creating the same Predicate as above but with a builder
criteriaQuery = and_(is_equal_to("active", True), is_between("age", 18, 21))
# Get result collections using the two different Predicates
result1 = users.values(sqlQuery).result()
result2 = users.values(criteriaQuery).result()
# Print out the results
print(result1)
print(result2)
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get a Blocking Queue called "my-distributed-queue"
queue = hz.get_queue("my-distributed-queue")
# Offer a String into the Distributed Queue
queue.offer("item")
# Poll the Distributed Queue and return the String
item = queue.poll().result()
# Timed blocking Operations
queue.offer("anotheritem", 1).result()
anotherItem = queue.poll(5).result()
# Indefinitely blocking Operations
queue.put("yetanotheritem").result()
print(queue.take().result())
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get a Replicated Map called "my-replicated-map"
rmap = hz.get_replicated_map("my-replicated-map")
# Put and Get a value from the Replicated Map
# key/value replicated to all members
replacedValue = rmap.put("key", "value").result()
# Will be None as it's the first update
print("replacedValue = ", replacedValue)
value = rmap.get("key").result()
# the value is retrieved from a random member in the cluster
print("value for key = ", value)
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
from hazelcast.proxy.ringbuffer import OVERFLOW_POLICY_FAIL
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
rb = hz.get_ringbuffer("rb")
# add two items into ring buffer
rb.add(100).result()
rb.add(200).result()
# we start from the oldest item.
# if you want to start from the next item, call rb.tail_sequence() + 1
sequence = rb.head_sequence().result()
print(rb.read_one(sequence).result())
sequence += 1
print(rb.read_one(sequence).result())
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get the Distributed Set from Cluster.
mset = hz.get_set("my-distributed-set")
# Add items to the set with duplicates
mset.add("item1")
mset.add("item1")
mset.add("item2")
mset.add("item2")
mset.add("item2")
mset.add("item3")
# Get the items. Note that there are no duplicates.
print(mset.get_all().result())
# Shutdown this Hazelcast Client
hz.shutdown()
import hazelcast
import logging
def print_on_message(topic_message):
print("Got message ", topic_message.message)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")
# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient()
# Get a Topic called "my-distributed-topic"
topic = hz.get_topic("my-distributed-topic")
# Add a Listener to the Topic
topic.add_listener(print_on_message)
# Publish a message to the Topic
topic.publish("Hello to distributed world")
# Shutdown this Hazelcast Client
hz.shutdown()
using System;
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class AtomicLongSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get an Atomic Counter, we'll call it "counter"
var counter = hz.GetAtomicLong("counter");
// Add and Get the "counter"
counter.AddAndGet(3); // value is now 3
// Display the "counter" value
Console.WriteLine("counter: " + counter.Get());
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System.Text;
using Hazelcast.Client;
using Hazelcast.Config;
using Hazelcast.IO;
using Hazelcast.IO.Serialization;
namespace Hazelcast.Examples.Org.Website.Samples
{
internal class CustomSerializableType
{
public string Value { get; set; }
}
internal class CustomSerializer : IStreamSerializer<CustomSerializableType>
{
public int GetTypeId()
{
return 10;
}
public void Destroy()
{
}
public void Write(IObjectDataOutput output, CustomSerializableType t)
{
var array = Encoding.UTF8.GetBytes(t.Value);
output.WriteInt(array.Length);
output.Write(array);
}
public CustomSerializableType Read(IObjectDataInput input)
{
var len = input.ReadInt();
var array = new byte[len];
input.ReadFully(array);
return new CustomSerializableType {Value = Encoding.UTF8.GetString(array)};
}
}
public class CustomSerializerSample
{
public static void Run(string[] args)
{
var clientConfig = new ClientConfig();
clientConfig.GetSerializationConfig()
.AddSerializerConfig(new SerializerConfig()
.SetImplementation(new CustomSerializer())
.SetTypeClass(typeof(CustomSerializableType)));
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient(clientConfig);
//CustomSerializer will serialize/deserialize CustomSerializable objects
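// Illustrative usage sketch (not part of the original sample; the map name "custom-map" is an arbitrary choice):
// CustomSerializableType values are now handled by CustomSerializer on the way to and from the cluster.
var map = hz.GetMap<string, CustomSerializableType>("custom-map");
map.Put("key", new CustomSerializableType {Value = "some value"});
System.Console.WriteLine("read back value: " + map.Get("key").Value);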
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
using Hazelcast.IO;
using Hazelcast.IO.Serialization;
using Hazelcast.Map;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class IncEntryProcessor : IEntryProcessor, IIdentifiedDataSerializable
{
private const int ClassId = 1;
private const int FactoryId = 66;
public void ReadData(IObjectDataInput input)
{
}
public void WriteData(IObjectDataOutput output)
{
}
public int GetFactoryId()
{
return FactoryId;
}
public int GetId()
{
return ClassId;
}
}
public class EntryProcessorSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get the Distributed Map from Cluster.
var map = hz.GetMap<string, int>("my-distributed-map");
// Put the integer value of 0 into the Distributed Map
map.Put("key", 0);
// Run the IncEntryProcessor class on the Hazelcast Cluster Member holding the key called "key"
map.ExecuteOnKey("key", new IncEntryProcessor());
// Show that the IncEntryProcessor updated the value.
Console.WriteLine("new value:" + map.Get("key"));
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using Hazelcast.Client;
using Hazelcast.Config;
using Hazelcast.IO;
using Hazelcast.IO.Serialization;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class GlobalSerializer : IStreamSerializer<object>
{
public int GetTypeId()
{
return 20;
}
public void Destroy()
{
}
public void Write(IObjectDataOutput output, object obj)
{
// out.write(MyFavoriteSerializer.serialize(obj))
}
public object Read(IObjectDataInput input)
{
// return MyFavoriteSerializer.deserialize(input);
return null;
}
}
public class GlobalSerializerSample
{
public static void Run(string[] args)
{
var clientConfig = new ClientConfig();
clientConfig.GetSerializationConfig().SetGlobalSerializerConfig(
new GlobalSerializerConfig().SetImplementation(new GlobalSerializer())
);
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient(clientConfig);
//GlobalSerializer will serialize/deserialize all non-builtin types
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
using Hazelcast.Config;
using Hazelcast.IO;
using Hazelcast.IO.Serialization;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class Employee : IIdentifiedDataSerializable
{
private const int ClassId = 100;
public int Id { get; set; }
public string Name { get; set; }
public void ReadData(IObjectDataInput input)
{
Id = input.ReadInt();
Name = input.ReadUTF();
}
public void WriteData(IObjectDataOutput output)
{
output.WriteInt(Id);
output.WriteUTF(Name);
}
public int GetFactoryId()
{
return SampleDataSerializableFactory.FactoryId;
}
public int GetId()
{
return ClassId;
}
}
public class SampleDataSerializableFactory : IDataSerializableFactory
{
public const int FactoryId = 1000;
public IIdentifiedDataSerializable Create(int typeId)
{
if (typeId == 100) return new Employee();
return null;
}
}
public class IdentifiedDataSerializableSample
{
public static void Run(string[] args)
{
var clientConfig = new ClientConfig();
clientConfig.GetSerializationConfig()
.AddDataSerializableFactory(SampleDataSerializableFactory.FactoryId,
new SampleDataSerializableFactory());
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient(clientConfig);
//Employee can be used here
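// Illustrative usage sketch (not part of the original sample; the map name "employees" is an arbitrary choice):
// the registered factory lets Employee objects be stored and read back transparently.
var employees = hz.GetMap<string, Employee>("employees");
employees.Put("emp-1", new Employee {Id = 1, Name = "John"});
Console.WriteLine("read back employee: " + employees.Get("emp-1").Name);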
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class ListSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get the Distributed List from Cluster.
var list = hz.GetList<string>("my-distributed-list");
// Add elements to the list
list.Add("item1");
list.Add("item2");
// Remove the first element
Console.WriteLine("Removed: " + list.Remove(0));
// There is only one element left
Console.WriteLine("Current size is " + list.Size());
// Clear the list
list.Clear();
// Shutdown this Hazelcast client
hz.Shutdown();
}
}
}
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class LockSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get a distributed lock called "my-distributed-lock"
var lck = hz.GetLock("my-distributed-lock");
// Now create a lock and execute some guarded code.
lck.Lock();
try
{
//do something here
}
finally
{
lck.Unlock();
}
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class MapSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get the Distributed Map from Cluster.
var map = hz.GetMap<string, string>("my-distributed-map");
//Standard Put and Get.
map.Put("key", "value");
map.Get("key");
//Concurrent Map methods, optimistic updating
map.PutIfAbsent("somekey", "somevalue");
map.Replace("key", "value", "newvalue");
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class DistributedMultiMapSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get the Distributed MultiMap from Cluster.
var multiMap = hz.GetMultiMap<string, string>("my-distributed-multimap");
// Put values in the map against the same key
multiMap.Put("my-key", "value1");
multiMap.Put("my-key", "value2");
multiMap.Put("my-key", "value3");
// Print out all the values associated with the key called "my-key"
var values = multiMap.Get("my-key");
foreach (var item in values)
{
Console.WriteLine(item);
}
// remove specific key/value pair
multiMap.Remove("my-key", "value2");
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
using Hazelcast.Config;
using Hazelcast.IO.Serialization;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class PortableSerializableSample
{
public class Customer : IPortable
{
public const int ClassId = 1;
public string Name { get; set; }
public int Id { get; set; }
public DateTime LastOrder { get; set; }
public int GetFactoryId()
{
return SamplePortableFactory.FactoryId;
}
public int GetClassId()
{
return ClassId;
}
public void WritePortable(IPortableWriter writer)
{
writer.WriteInt("id", Id);
writer.WriteUTF("name", Name);
writer.WriteLong("lastOrder", LastOrder.ToFileTimeUtc());
}
public void ReadPortable(IPortableReader reader)
{
Id = reader.ReadInt("id");
Name = reader.ReadUTF("name");
LastOrder = DateTime.FromFileTimeUtc(reader.ReadLong("lastOrder"));
}
}
public class SamplePortableFactory : IPortableFactory
{
public const int FactoryId = 1;
public IPortable Create(int classId)
{
if (classId == Customer.ClassId) return new Customer();
return null;
}
}
public static void Run(string[] args)
{
var clientConfig = new ClientConfig();
clientConfig.GetSerializationConfig()
.AddPortableFactory(SamplePortableFactory.FactoryId, new SamplePortableFactory());
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient(clientConfig);
//Customer can be used here
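// Illustrative usage sketch (not part of the original sample; the map name "customers" is an arbitrary choice):
// the registered Portable factory handles serialization of Customer transparently.
var customers = hz.GetMap<string, Customer>("customers");
customers.Put("cust-1", new Customer {Id = 1, Name = "Jane", LastOrder = DateTime.UtcNow});
Console.WriteLine("read back customer: " + customers.Get("cust-1").Name);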
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
using Hazelcast.Config;
using Hazelcast.Core;
using Hazelcast.IO.Serialization;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class QuerySample
{
public class User : IPortable
{
public const int ClassId = 1;
private string _username;
private int _age;
private bool _active;
public User()
{
}
public User(string username, int age, bool active)
{
_username = username;
_age = age;
_active = active;
}
public int GetFactoryId()
{
return PortableFactory.FactoryId;
}
public int GetClassId()
{
return ClassId;
}
public void ReadPortable(IPortableReader reader)
{
_username = reader.ReadUTF("username");
_age = reader.ReadInt("age");
_active = reader.ReadBoolean("active");
}
public void WritePortable(IPortableWriter writer)
{
writer.WriteUTF("username", _username);
writer.WriteInt("age", _age);
writer.WriteBoolean("active", _active);
}
}
public class PortableFactory : IPortableFactory
{
public const int FactoryId = 1;
public IPortable Create(int classId)
{
if (classId == User.ClassId) return new User();
return null;
}
}
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var clientConfig = new ClientConfig();
clientConfig.GetSerializationConfig()
.AddPortableFactory(PortableFactory.FactoryId, new PortableFactory());
var hz = HazelcastClient.NewHazelcastClient(clientConfig);
// Get a Distributed Map called "users"
var users = hz.GetMap<string, User>("users");
// Add some users to the Distributed Map
GenerateUsers(users);
// Create a Predicate from a String (a SQL like Where clause)
var sqlQuery = Predicates.Sql("active AND age BETWEEN 18 AND 21");
// Creating the same Predicate as above but with a builder
var criteriaQuery = Predicates.And(
Predicates.IsEqual("active", true),
Predicates.IsBetween("age", 18, 21)
);
// Get result collections using the two different Predicates
var result1 = users.Values(sqlQuery);
var result2 = users.Values(criteriaQuery);
// Print out the results
Console.WriteLine(result1);
Console.WriteLine(result2);
// Shutdown this Hazelcast Client
hz.Shutdown();
}
private static void GenerateUsers(IMap<string, User> users)
{
users.Put("Rod", new User("Rod", 19, true));
users.Put("Jane", new User("Jane", 20, true));
users.Put("Freddy", new User("Freddy", 23, true));
}
}
}
using System;
using Hazelcast.Client;
using Hazelcast.Core;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class QueueSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get a Blocking Queue called "my-distributed-queue"
var queue = hz.GetQueue<string>("my-distributed-queue");
// Offer a String into the Distributed Queue
queue.Offer("item");
// Poll the Distributed Queue and return the String
queue.Poll();
//Timed blocking Operations
queue.Offer("anotheritem", 500, TimeUnit.Milliseconds);
queue.Poll(5, TimeUnit.Seconds);
//Indefinitely blocking Operations
queue.Put("yetanotheritem");
Console.WriteLine(queue.Take());
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class ReplicatedMapSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get a Replicated Map called "my-replicated-map"
var map = hz.GetReplicatedMap<string, string>("my-replicated-map");
// Put and Get a value from the Replicated Map
var replacedValue = map.Put("key", "value"); // key/value replicated to all members
Console.WriteLine("replacedValue = " + replacedValue); // Will be null as its first update
var value = map.Get("key"); // the value is retrieved from a random member in the cluster
Console.WriteLine("value for key = " + value);
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class RingBufferSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
var rb = hz.GetRingbuffer<long>("rb");
// add two items into ring buffer
rb.Add(100);
rb.Add(200);
// we start from the oldest item.
// if you want to start from the next item, call rb.TailSequence() + 1
var sequence = rb.HeadSequence();
Console.WriteLine(rb.ReadOne(sequence));
sequence += 1;
Console.WriteLine(rb.ReadOne(sequence));
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class SetSample
{
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get the Distributed Set from Cluster.
var set = hz.GetSet<string>("my-distributed-set");
// Add items to the set with duplicates
set.Add("item1");
set.Add("item1");
set.Add("item2");
set.Add("item2");
set.Add("item2");
set.Add("item3");
// Get the items. Note that there are no duplicates.
foreach (var item in set)
{
Console.WriteLine(item);
}
// Shutdown this Hazelcast client
hz.Shutdown();
}
}
}
using System;
using Hazelcast.Client;
using Hazelcast.Core;
namespace Hazelcast.Examples.Org.Website.Samples
{
public class TopicSample : IMessageListener<string>
{
public void OnMessage(Message<string> message)
{
Console.WriteLine("Got message " + message.GetMessageObject());
}
public static void Run(string[] args)
{
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
var hz = HazelcastClient.NewHazelcastClient();
// Get a Topic called "my-distributed-topic"
var topic = hz.GetTopic<string>("my-distributed-topic");
// Add a Listener to the Topic
topic.AddMessageListener(new TopicSample());
// Publish a message to the Topic
topic.Publish("Hello to distributed world");
// Shutdown this Hazelcast Client
hz.Shutdown();
}
}
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get an Atomic Counter, we'll call it "counter"
IAtomicLong counter = hz.getIAtomicLong("counter");
// Add and Get the "counter"
counter.addAndGet(3); // value is now 3
// Display the "counter" value
std::cout << "counter: " + counter.get() << std::endl;
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
class CustomSerializable {
public:
CustomSerializable() {}
CustomSerializable(const std::string &value) : value(value) {}
virtual ~CustomSerializable() {
}
const std::string &getValue() const {
return value;
}
void setValue(const std::string &value) {
CustomSerializable::value = value;
}
private:
std::string value;
};
class CustomSerializer : public serialization::StreamSerializer {
public:
virtual int32_t getHazelcastTypeId() const {
return 10;
}
virtual void write(serialization::ObjectDataOutput &out, const void *object) {
const CustomSerializable *csObject = static_cast<const CustomSerializable *>(object);
const std::string &value = csObject->getValue();
int length = (int) value.length();
std::vector<hazelcast::byte> bytes;
for (int i = 0; i < length; ++i) {
bytes.push_back((hazelcast::byte) value[i]);
}
out.writeInt((int) length);
out.write(bytes);
}
virtual void *read(serialization::ObjectDataInput &in) {
int32_t len = in.readInt();
std::ostringstream value;
for (int i = 0; i < len; ++i) {
value << (char) in.readByte();
}
return new CustomSerializable(value.str());
}
};
int main() {
ClientConfig clientConfig;
clientConfig.getSerializationConfig().registerSerializer(
boost::shared_ptr<serialization::StreamSerializer>(new CustomSerializer()));
HazelcastClient hz(clientConfig);
IMap<long, CustomSerializable> map = hz.getMap<long, CustomSerializable>("customMap");
map.put(1L, CustomSerializable("fooooo"));
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
class IncEntryProcessor : public serialization::IdentifiedDataSerializable {
public:
virtual int getFactoryId() const {
return 66;
}
virtual int getClassId() const {
return 1;
}
virtual void writeData(serialization::ObjectDataOutput &writer) const {
}
virtual void readData(serialization::ObjectDataInput &reader) {
}
};
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get the Distributed Map from Cluster.
IMap<std::string, int> map = hz.getMap<std::string, int>("my-distributed-map");
// Put the integer value of 0 into the Distributed Map
boost::shared_ptr<int> replacedValue = map.put("key", 0);
// Run the IncEntryProcessor class on the Hazelcast Cluster Member holding the key called "key"
boost::shared_ptr<std::string> returnValueFromIncEntryProcessor = map.executeOnKey<std::string, IncEntryProcessor>(
"key", IncEntryProcessor());
// Show that the IncEntryProcessor updated the value.
std::cout << "new value:" << map.get("key");
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
class GlobalSerializer : public serialization::StreamSerializer {
public:
virtual int32_t getHazelcastTypeId() const {
return 20;
}
virtual void write(serialization::ObjectDataOutput &out, const void *object) {
// out.write(MyFavoriteSerializer.serialize(object))
}
virtual void *read(serialization::ObjectDataInput &in) {
// return MyFavoriteSerializer.deserialize(in);
return NULL;
}
};
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
clientConfig.getSerializationConfig().setGlobalSerializer(
boost::shared_ptr<serialization::StreamSerializer>(new GlobalSerializer()));
HazelcastClient hz(clientConfig);
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
class SampleDataSerializableFactory;
class Employee : public serialization::IdentifiedDataSerializable {
public:
static const int TYPE_ID = 100;
virtual int getFactoryId() const {
return 1000;
}
virtual int getClassId() const {
return TYPE_ID;
}
virtual void writeData(serialization::ObjectDataOutput &writer) const {
writer.writeInt(id);
writer.writeUTF(&name);
}
virtual void readData(serialization::ObjectDataInput &reader) {
id = reader.readInt();
name = *reader.readUTF();
}
private:
int id;
std::string name;
};
class SampleDataSerializableFactory : public serialization::DataSerializableFactory {
public:
static const int FACTORY_ID = 1000;
virtual std::auto_ptr<serialization::IdentifiedDataSerializable> create(int32_t classId) {
switch (classId) {
case 100:
return std::auto_ptr<serialization::IdentifiedDataSerializable>(new Employee());
default:
return std::auto_ptr<serialization::IdentifiedDataSerializable>();
}
}
};
int main() {
ClientConfig clientConfig;
clientConfig.getSerializationConfig().addDataSerializableFactory(SampleDataSerializableFactory::FACTORY_ID,
boost::shared_ptr<serialization::DataSerializableFactory>(
new SampleDataSerializableFactory()));
HazelcastClient hz(clientConfig);
//Employee can be used here
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get the Distributed List from Cluster.
IList<std::string> list = hz.getList<std::string>("my-distributed-list");
// Add elements to the list
list.add("item1");
list.add("item2");
// Remove the first element
std::cout << "Removed: " << *list.remove(0);
// There is only one element left
std::cout << "Current size is " << list.size() << std::endl;
// Clear the list
list.clear();
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get a distributed lock called "my-distributed-lock"
ILock lock = hz.getILock("my-distributed-lock");
// Now create a lock and execute some guarded code.
lock.lock();
try {
//do something here
// Release the lock when done
lock.unlock();
} catch (...) {
// Make sure the lock is released even if an exception is thrown
lock.unlock();
throw;
}
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get the Distributed Map from Cluster.
IMap<std::string, std::string> map = hz.getMap<std::string, std::string>("my-distributed-map");
//Standard Put and Get.
map.put("key", "value");
map.get("key");
//Concurrent Map methods, optimistic updating
map.putIfAbsent("somekey", "somevalue");
map.replace("key", "value", "newvalue");
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get the Distributed MultiMap from Cluster.
MultiMap<std::string, std::string> multiMap = hz.getMultiMap<std::string, std::string>("my-distributed-multimap");
// Put values in the map against the same key
multiMap.put("my-key", "value1");
multiMap.put("my-key", "value2");
multiMap.put("my-key", "value3");
// Print out all the values associated with the key called "my-key"
std::vector<std::string> values = multiMap.get("my-key");
for (std::vector<std::string>::const_iterator it = values.begin();it != values.end(); ++it) {
std::cout << *it << std::endl;
}
// remove specific key/value pair
multiMap.remove("my-key", "value2"); // Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
#include <hazelcast/client/serialization/PortableReader.h>
#include <hazelcast/client/serialization/PortableWriter.h>
using namespace hazelcast::client;
class PortableSerializableSample : public serialization::Portable {
public:
static const int CLASS_ID = 1;
virtual int getFactoryId() const {
return 1;
}
virtual int getClassId() const {
return CLASS_ID;
}
virtual void writePortable(serialization::PortableWriter &writer) const {
writer.writeInt("id", id);
writer.writeUTF("name", &name);
writer.writeLong("lastOrder", lastOrder);
}
virtual void readPortable(serialization::PortableReader &reader) {
id = reader.readInt("id");
name = *reader.readUTF("name");
lastOrder = reader.readLong("lastOrder");
}
private:
std::string name;
int32_t id;
int64_t lastOrder;
};
class SamplePortableFactory : public serialization::PortableFactory {
public:
static const int FACTORY_ID = 1;
virtual std::auto_ptr<serialization::Portable> create(int32_t classId) const {
switch (classId) {
case 1:
return std::auto_ptr<serialization::Portable>(new PortableSerializableSample());
default:
return std::auto_ptr<serialization::Portable>();
}
}
};
int main() {
ClientConfig clientConfig;
clientConfig.getSerializationConfig().addPortableFactory(SamplePortableFactory::FACTORY_ID,
boost::shared_ptr<serialization::PortableFactory>(
new SamplePortableFactory()));
HazelcastClient hz(clientConfig);
//Customer can be used here
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
#include <hazelcast/client/query/SqlPredicate.h>
#include <hazelcast/client/query/AndPredicate.h>
#include <hazelcast/client/query/EqualPredicate.h>
#include <hazelcast/client/query/BetweenPredicate.h>
#include <hazelcast/client/serialization/PortableWriter.h>
#include <hazelcast/client/serialization/PortableReader.h>
#include <ostream>
using namespace hazelcast::client;
class User : public serialization::Portable {
public:
static const int CLASS_ID = 1;
User(const std::string &username, int age, bool active) : username(username), age(age), active(active) {
}
User() : age(0), active(false) {
}
virtual int getFactoryId() const {
return 1;
}
virtual int getClassId() const {
return CLASS_ID;
}
virtual void writePortable(serialization::PortableWriter &writer) const {
writer.writeUTF("username", &username);
writer.writeInt("age", age);
writer.writeBoolean("active", active);
}
virtual void readPortable(serialization::PortableReader &reader) {
username = *reader.readUTF("username");
age = reader.readInt("age");
active = reader.readBoolean("active");
}
friend std::ostream &operator<<(std::ostream &os, const User &user) {
os << "User{" << " username: " << user.username << " age: " << user.age << " active: " << user.active << '}';
return os;
}
private:
std::string username;
int age;
bool active;
};
class ThePortableFactory : public serialization::PortableFactory {
public:
static const int FACTORY_ID = 1;
virtual std::auto_ptr<serialization::Portable> create(int32_t classId) const {
if (classId == User::CLASS_ID) {
return std::auto_ptr<serialization::Portable>(new User());
}
return std::auto_ptr<serialization::Portable>();
}
};
void generateUsers(IMap<std::string, User> &users) {
users.put("Rod", User("Rod", 19, true));
users.put("Jane", User("Jane", 20, true));
users.put("Freddy", User("Freddy", 23, true));
}
int main() {
ClientConfig clientConfig;
clientConfig.getSerializationConfig().addPortableFactory(ThePortableFactory::FACTORY_ID,
boost::shared_ptr<serialization::PortableFactory>(
new ThePortableFactory()));
HazelcastClient hz(clientConfig);
// Get a Distributed Map called "users"
IMap<std::string, User> users = hz.getMap<std::string, User>("users");
// Add some users to the Distributed Map
generateUsers(users);
// Create a Predicate from a String (a SQL like Where clause)
query::SqlPredicate sqlQuery = query::SqlPredicate("active AND age BETWEEN 18 AND 21");
// Creating the same Predicate as above but with AndPredicate builder
query::AndPredicate criteriaQuery;
criteriaQuery.add(std::auto_ptr<query::Predicate>(new query::EqualPredicate<bool>("active", true)))
.add(std::auto_ptr<query::Predicate>(new query::BetweenPredicate<int>("age", 18, 21)));
// Get result collections using the two different Predicates
std::vector<User> result1 = users.values(sqlQuery);
std::vector<User> result2 = users.values(criteriaQuery);
// Print out the results
std::cout << "Result 1:" << std::endl;
for (std::vector<User>::const_iterator it = result1.begin(); it != result1.end(); ++it) {
std::cout << (*it) << std::endl;
}
std::cout << "Result 2:" << std::endl;
for (std::vector<User>::const_iterator it = result2.begin(); it != result2.end(); ++it) {
std::cout << (*it) << std::endl;
}
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get a Blocking Queue called "my-distributed-queue"
IQueue<std::string> queue = hz.getQueue<std::string>("my-distributed-queue");
// Offer a String into the Distributed Queue
queue.offer("item");
// Poll the Distributed Queue and return the String
boost::shared_ptr<std::string> item = queue.poll();
//Timed blocking Operations
queue.offer("anotheritem", 500);
boost::shared_ptr<std::string> anotherItem = queue.poll(5 * 1000);
//Indefinitely blocking Operations
queue.put("yetanotheritem");
std::cout << *queue.take() << std::endl;
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig config;
HazelcastClient hz(config);
boost::shared_ptr<Ringbuffer<long> > rb = hz.getRingbuffer<long>("rb");
// add two items into ring buffer
rb->add(100);
rb->add(200);
// we start from the oldest item.
// if you want to start from the next item, call rb.tailSequence()+1
int64_t sequence = rb->headSequence();
std::cout << *rb->readOne(sequence) << std::endl;
sequence++;
std::cout << *rb->readOne(sequence) << std::endl;
// Shutdown this Hazelcast Client
hz.shutdown();
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get the Distributed Set from Cluster.
ISet<std::string> set = hz.getSet<std::string>("my-distributed-set");
// Add items to the set with duplicates
set.add("item1");
set.add("item1");
set.add("item2");
set.add("item2");
set.add("item2");
set.add("item3");
// Get the items. Note that there are no duplicates.
std::vector<std::string> values = set.toArray();
for (std::vector<std::string>::const_iterator it=values.begin();it != values.end();++it) {
std::cout << (*it) << std::endl;
}
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
#include <hazelcast/client/HazelcastAll.h>
using namespace hazelcast::client;
class TopicSample : public topic::MessageListener<std::string> {
public:
virtual void onMessage(std::auto_ptr<topic::Message<std::string> > message) {
std::cout << "Got message " << message->getMessageObject() << std::endl;
}
};
int main() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
ClientConfig clientConfig;
HazelcastClient hz(clientConfig);
// Get a Topic called "my-distributed-topic"
ITopic<std::string> topic = hz.getTopic<std::string>("my-distributed-topic");
// Add a Listener to the Topic
TopicSample listener;
topic.addMessageListener<TopicSample>(listener);
// Publish a message to the Topic
topic.publish("Hello to distributed world");
// Shutdown this Hazelcast Client
hz.shutdown();
return 0;
}
package orgwebsite
import (
"reflect"
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/serialization"
)
type CustomSerializable struct {
value string
}
type CustomSerializer struct {
}
func (s *CustomSerializer) ID() int32 {
return 10
}
func (s *CustomSerializer) Read(input serialization.DataInput) (obj interface{}, err error) {
array, err := input.ReadByteArray()
return &CustomSerializable{string(array)}, err
}
func (s *CustomSerializer) Write(output serialization.DataOutput, obj interface{}) (err error) {
array := []byte(obj.(*CustomSerializable).value)
output.WriteByteArray(array)
return
}
func customSerializerSampleRun() {
clientConfig := hazelcast.NewConfig()
clientConfig.SerializationConfig().AddCustomSerializer(reflect.TypeOf((*CustomSerializable)(nil)), &CustomSerializer{})
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClientWithConfig(clientConfig)
// CustomSerializer will serialize/deserialize CustomSerializable objects
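// Illustrative usage sketch (not part of the original sample; the map name "custom-map" is an arbitrary choice):
// *CustomSerializable values are now handled by CustomSerializer on the way to and from the cluster.
mp, _ := hz.GetMap("custom-map")
mp.Put("key", &CustomSerializable{value: "some value"})
readBack, _ := mp.Get("key")
_ = readBack // deserialized back into a *CustomSerializable by CustomSerializer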
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/serialization"
)
const (
incEntryProcessorClassID = 1
incEntryProcessorFactoryID = 66
)
type IncEntryProcessor struct {
}
func (p *IncEntryProcessor) ReadData(input serialization.DataInput) error {
return nil
}
func (p *IncEntryProcessor) WriteData(output serialization.DataOutput) error {
return nil
}
func (p *IncEntryProcessor) FactoryID() int32 {
return incEntryProcessorFactoryID
}
func (p *IncEntryProcessor) ClassID() int32 {
return incEntryProcessorClassID
}
func entryProcessorSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
clientConfig := hazelcast.NewConfig()
entryProcessor := &IncEntryProcessor{}
hz, _ := hazelcast.NewClientWithConfig(clientConfig)
// Get the Distributed Map from Cluster.
mp, _ := hz.GetMap("my-distributed-map")
// Put the integer value of 0 into the Distributed Map
mp.Put("key", 0)
// Run the IncEntryProcessor class on the Hazelcast Cluster Member holding the key called "key"
mp.ExecuteOnKey("key", entryProcessor)
// Show that the IncEntryProcessor updated the value.
newValue, _ := mp.Get("key")
fmt.Println("new value:", newValue)
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/config"
"github.com/hazelcast/hazelcast-go-client/serialization"
)
type GlobalSerializer struct {
}
func (*GlobalSerializer) ID() int32 {
return 20
}
func (*GlobalSerializer) Read(input serialization.DataInput) (obj interface{}, err error) {
// return MyFavoriteSerializer.deserialize(input)
return
}
func (*GlobalSerializer) Write(output serialization.DataOutput, object interface{}) (err error) {
// output.write(MyFavoriteSerializer.serialize(object))
return
}
func globalSerializerSampleRun() {
clientConfig := config.New()
clientConfig.SerializationConfig().SetGlobalSerializer(&GlobalSerializer{})
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClientWithConfig(clientConfig)
//GlobalSerializer will serialize/deserialize all non-builtin types
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/config"
"github.com/hazelcast/hazelcast-go-client/serialization"
)
const (
employeeClassID = 100
sampleDataSerializableFactoryID = 1000
)
type Employee struct {
id int32
name string
}
func (e *Employee) ClassID() int32 {
return employeeClassID
}
func (e *Employee) FactoryID() int32 {
return sampleDataSerializableFactoryID
}
func (e *Employee) ReadData(input serialization.DataInput) (err error) {
e.id, err = input.ReadInt32()
if err != nil {
return
}
e.name, err = input.ReadUTF()
return
}
func (e *Employee) WriteData(output serialization.DataOutput) (err error) {
output.WriteInt32(e.id)
output.WriteUTF(e.name)
return
}
type SampleDataSerializableFactory struct {
}
func (*SampleDataSerializableFactory) Create(classID int32) serialization.IdentifiedDataSerializable {
if classID == employeeClassID {
return &Employee{}
}
return nil
}
func identifiedDataSerializableSampleRun() {
clientConfig := config.New()
clientConfig.SerializationConfig().AddDataSerializableFactory(sampleDataSerializableFactoryID, &SampleDataSerializableFactory{})
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClientWithConfig(clientConfig)
// Employee can be used here
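// Illustrative usage sketch (not part of the original sample; the map name "employees" is an arbitrary choice):
// the registered factory lets Employee values be stored and read back transparently.
employees, _ := hz.GetMap("employees")
employees.Put("emp-1", &Employee{id: 1, name: "John"})
readBack, _ := employees.Get("emp-1")
_ = readBack // reconstructed as an *Employee by SampleDataSerializableFactory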
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
)
func listSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Get the distributed list from cluster
list, _ := hz.GetList("my-distributed-list")
// Add elements to the list
list.Add("item1")
list.Add("item2")
// Remove the first element
removed, _ := list.RemoveAt(0)
fmt.Println("removed: ", removed)
// There is only one element left
size, _ := list.Size()
fmt.Println("current size is: ", size)
// Clear the list
list.Clear()
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import "github.com/hazelcast/hazelcast-go-client"
func mapSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Get the Distributed Map from Cluster.
mp, _ := hz.GetMap("myDistributedMap")
//Standard Put and Get.
mp.Put("key", "value")
mp.Get("key")
//Concurrent Map methods, optimistic updating
mp.PutIfAbsent("somekey", "somevalue")
mp.ReplaceIfSame("key", "value", "newvalue")
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
)
func multimapSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Get the Distributed MultiMap from Cluster.
multiMap, _ := hz.GetMultiMap("myDistributedMultimap")
// Put values in the map against the same key
multiMap.Put("my-key", "value1")
multiMap.Put("my-key", "value2")
multiMap.Put("my-key", "value3")
// Print out all the values associated with the key "my-key"
values, _ := multiMap.Get("my-key")
fmt.Println(values)
// remove specific key/value pair
multiMap.Remove("my-key", "value2")
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"time"
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/config"
"github.com/hazelcast/hazelcast-go-client/serialization"
)
const (
customerClassID = 1
samplePortableFactoryID = 1
)
type Customer struct {
name string
id int32
lastOrder time.Time
}
func (c *Customer) FactoryID() int32 {
return samplePortableFactoryID
}
func (c *Customer) ClassID() int32 {
return customerClassID
}
func (c *Customer) WritePortable(writer serialization.PortableWriter) (err error) {
writer.WriteInt32("id", c.id)
writer.WriteUTF("name", c.name)
writer.WriteInt64("lastOrder", c.lastOrder.UnixNano()/int64(time.Millisecond))
return
}
func (c *Customer) ReadPortable(reader serialization.PortableReader) (err error) {
c.id, err = reader.ReadInt32("id")
if err != nil {
return
}
c.name, err = reader.ReadUTF("name")
if err != nil {
return
}
t, err := reader.ReadInt64("lastOrder")
if err != nil {
return
}
c.lastOrder = time.Unix(0, t*int64(time.Millisecond))
return
}
type SamplePortableFactory struct {
}
func (pf *SamplePortableFactory) Create(classID int32) serialization.Portable {
if classID == customerClassID {
return &Customer{}
}
return nil
}
func portableSerializableSampleRun() {
clientConfig := config.New()
clientConfig.SerializationConfig().AddPortableFactory(samplePortableFactoryID, &SamplePortableFactory{})
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClientWithConfig(clientConfig)
// Customer can be used here
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/core"
"github.com/hazelcast/hazelcast-go-client/core/predicate"
"github.com/hazelcast/hazelcast-go-client/serialization"
)
const (
userClassID = 1
userFactoryID = 1
)
type User struct {
username string
age int32
active bool
}
func newUser(username string, age int32, active bool) *User {
return &User{
username: username,
age: age,
active: active,
}
}
func (u *User) FactoryID() int32 {
return userFactoryID
}
func (u *User) ClassID() int32 {
return userClassID
}
func (u *User) WritePortable(writer serialization.PortableWriter) error {
writer.WriteUTF("username", u.username)
writer.WriteInt32("age", u.age)
writer.WriteBool("active", u.active)
return nil
}
func (u *User) ReadPortable(reader serialization.PortableReader) error {
var err error
u.username, err = reader.ReadUTF("username")
if err != nil {
return err
}
u.age, err = reader.ReadInt32("age")
if err != nil {
return err
}
u.active, err = reader.ReadBool("active")
return err
}
type ThePortableFactory struct {
}
func (pf *ThePortableFactory) Create(classID int32) serialization.Portable {
if classID == userClassID {
return &User{}
}
return nil
}
func generateUsers(users core.Map) {
users.Put("Rod", newUser("Rod", 19, true))
users.Put("Jane", newUser("Jane", 20, true))
users.Put("Freddy", newUser("Freddy", 23, true))
}
func querySampleRun() {
clientConfig := hazelcast.NewConfig()
clientConfig.SerializationConfig().
AddPortableFactory(userFactoryID, &ThePortableFactory{})
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClientWithConfig(clientConfig)
// Get a Distributed Map called "users"
users, _ := hz.GetMap("users")
// Add some users to the Distributed Map
generateUsers(users)
// Create a Predicate from a String (a SQL-like where clause)
var sqlQuery = predicate.SQL("active AND age BETWEEN 18 AND 21")
// Creating the same Predicate as above but with a builder
var criteriaQuery = predicate.And(
predicate.Equal("active", true),
predicate.Between("age", 18, 21))
// Get result collections using the two different Predicates
result1, _ := users.ValuesWithPredicate(sqlQuery)
result2, _ := users.ValuesWithPredicate(criteriaQuery)
// Print out the results
fmt.Println(result1)
fmt.Println(result2)
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"time"
"github.com/hazelcast/hazelcast-go-client"
)
func queueSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Get a Blocking Queue called "my-distributed-queue"
queue, _ := hz.GetQueue("my-distributed-queue")
// Offer a String into the Distributed Queue
queue.Offer("item")
// Poll the Distributed Queue and return the String
queue.Poll()
//Timed blocking Operations
queue.OfferWithTimeout("anotheritem", 500*time.Millisecond)
queue.PollWithTimeout(5 * time.Second)
//Indefinitely blocking Operations
queue.Put("yetanotheritem")
fmt.Println(queue.Take())
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
)
func replicatedMapSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Get a Replicated Map called "my-replicated-map"
mp, _ := hz.GetReplicatedMap("my-replicated-map")
// Put and Get a value from the Replicated Map
replacedValue, _ := mp.Put("key", "value") // key/value replicated to all members
fmt.Println("replacedValue = ", replacedValue) // Will be null as its first update
value, _ := mp.Get("key") // the value is retrieved from a random member in the cluster
fmt.Println("value for key = ", value)
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/core"
)
func ringBufferSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
rb, _ := hz.GetRingbuffer("rb")
// add two items into ring buffer
rb.Add(100, core.OverflowPolicyOverwrite)
rb.Add(200, core.OverflowPolicyOverwrite)
// We start reading from the oldest item.
// To start from the next item instead, use rb.TailSequence()+1
sequence, _ := rb.HeadSequence()
item, _ := rb.ReadOne(sequence)
fmt.Println(item)
sequence++
item, _ = rb.ReadOne(sequence)
fmt.Println(item)
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
)
func setSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Get the distributed set from cluster
set, _ := hz.GetSet("my-distributed-set")
// Add items to the set with duplicates
set.Add("item1")
set.Add("item1")
set.Add("item2")
set.Add("item2")
set.Add("item3")
set.Add("item3")
// Get the items. Note that there are no duplicates
items, _ := set.ToSlice()
fmt.Println(items)
// Shutdown this hazelcast client
hz.Shutdown()
}
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
"github.com/hazelcast/hazelcast-go-client/core"
)
type topicMessageListener struct {
}
func (*topicMessageListener) OnMessage(message core.Message) {
fmt.Println("Got message: ", message.MessageObject())
}
func topicSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Get a Topic called "my-distributed-topic"
topic, _ := hz.GetTopic("my-distributed-topic")
// Add a Listener to the Topic
topic.AddMessageListener(&topicMessageListener{})
// Publish a message to the Topic
topic.Publish("Hello to distributed world")
// Shutdown this hazelcast client
hz.Shutdown()
}
Hazelcast IMDG is often used as an operational memory layer in front of databases to improve application performance, to distribute data across servers, clusters and geographies, to ingest data at very high rates, and to manage large data sets.
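Purely as an illustration of that caching pattern, the sketch below shows a cache-aside read through the Go client: check the distributed map first and fall back to the data source only on a miss. The map name "query-cache", the key, and the loadFromDatabase function are hypothetical placeholders, not part of the Hazelcast API.
package orgwebsite
import "github.com/hazelcast/hazelcast-go-client"
// loadFromDatabase is a hypothetical stand-in for a real database query.
func loadFromDatabase(key string) string {
return "value-for-" + key
}
func cacheAsideSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// Use a Distributed Map as the cache in front of the database
cache, _ := hz.GetMap("query-cache")
key := "customer-42"
// Try the cache first
value, _ := cache.Get(key)
if value == nil {
// Cache miss: load from the database and populate the cache for later reads
value = loadFromDatabase(key)
cache.Put(key, value)
}
// Subsequent reads of the same key are now served from memory
// Shutdown this hazelcast client
hz.Shutdown()
}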
Hazelcast IMDG is one of the most popular open-source caching solutions, ensuring that data is in the right place when it’s needed for optimal performance.
Hazelcast IMDG can be used as the operational memory of a Microservices architecture.
Hazelcast IMDG provides web session clustering, which keeps user sessions in memory for redundancy and seamless backup.
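Web session clustering itself is typically wired into the web or application server tier, but the underlying idea is simply session state held in a distributed map keyed by session ID. The following Go sketch illustrates only that idea; the map name "web-sessions", the session identifier, and the attribute key are hypothetical.
package orgwebsite
import (
"fmt"
"github.com/hazelcast/hazelcast-go-client"
)
func sessionStoreSampleRun() {
// Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz, _ := hazelcast.NewClient()
// A Distributed Map holding session attributes, keyed by session ID plus attribute name
sessions, _ := hz.GetMap("web-sessions")
sessionID := "b8f1c2d4" // hypothetical session identifier
// Store a session attribute; the entry is backed up across the cluster for redundancy
sessions.Put(sessionID+".username", "jane")
// Any client or member in the cluster can read the same session state
username, _ := sessions.Get(sessionID + ".username")
fmt.Println("username: ", username)
// Shutdown this hazelcast client
hz.Shutdown()
}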
Hazelcast IMDG has a broadcast messaging system offering a set of features comparable to JMS topics.
Hazelcast IMDG is an in-memory NoSQL key-value store. More and more deployments are seeing the advantages of ever-expanding RAM sizes at lower costs.
Hazelcast IMDG combines distributed data structures, distributed caching capabilities, elasticity, memcached support, and integration with Spring and Hibernate. These capabilities bring several benefits to enterprise deployments, including the ability to handle thousands of operations per second, prevent the loss of data after crashes, and dynamically scale as new servers are added.
Get professional support from the same people who built the software.
To learn more, visit us at Hazelcast.com/support