07 august 2025

Kill process in Windows & Linux

Windows:

> netstat -ano | findstr :8080

> taskkill /PID <pid> /F


Linux:

> sudo lsof -i :8080

> sudo kill -9 <pid>

02 august 2025

Proto vs JSON performance test

public class PerformanceTest {
    private static final Logger LOGGER = LoggerFactory.getLogger(PerformanceTest.class);
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        var protoPerson = Person.newBuilder()
                .setLastName("T")
                .setAge(32)
                .setEmail("t@email.com")
                .setEmployed(true)
                .setSalary(123004.5)
                .setBankAccountNumber(38495959093040944L)
                .setBalance(-100)
                .build();
        LOGGER.info("how many bytes protoPerson has? {}", protoPerson.toByteArray().length); // 41

        var jsonPerson = new JsonPerson("T", 32, "t@email.com", true, 123004.5, 38495959093040944L, -100);
        var bytes = MAPPER.writeValueAsBytes(jsonPerson);
        LOGGER.info("how many bytes jsonPerson has? {}", bytes.length); // 130 = 3x more!

        for (int i = 0; i < 5; i++) { // first run to be ignored (JVM warmup)
            runTest("json", () -> json(jsonPerson));
            runTest("proto", () -> proto(protoPerson)); // 7x faster!
        }
    }

    private static void runTest(String testName, Runnable runnable) {
        var start = System.currentTimeMillis();
        for (int i = 0; i < 5_000_000; i++) {
            runnable.run();
        }
        var end = System.currentTimeMillis();
        LOGGER.info("time taken for {} = {} ms", testName, (end - start));
    }

    private static void proto(Person person) {
        try {
            // serialize + deserialize round trip
            var bytes = person.toByteArray();
            Person.parseFrom(bytes);
        } catch (InvalidProtocolBufferException e) {
            throw new RuntimeException(e);
        }
    }

    private static void json(JsonPerson person) {
        try {
            // serialize + deserialize round trip
            var bytes = MAPPER.writeValueAsBytes(person);
            MAPPER.readValue(bytes, JsonPerson.class);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
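
The JsonPerson counterpart is not included in the note; below is a minimal sketch of it as a plain Java record. The field names and the type of balance are assumptions on my part; only the constructor argument order is taken from the call in main().

// Assumed shape of the JSON counterpart used above; field names are illustrative,
// only the constructor argument order matches the call in main().
public record JsonPerson(
        String lastName,
        int age,
        String email,
        boolean employed,
        double salary,
        long bankAccountNumber,
        double balance) { // balance type is a guess; the proto builder above just passes -100
}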

04 december 2024

Implementing a Rate Limiter

public class RateLimiter {
    private static final int ONE_SECOND = 1000;
    private final Map<String, List<Long>> requests;
    private boolean enabled = false;
    private int requestsPerSecond;

    public RateLimiter(Environment environment) {
        requests = new HashMap<>();
        if (environment.getProperty("enable.rate.limiter") != null) {
            this.enabled = Boolean.parseBoolean(environment.getProperty("enable.rate.limiter"));
        }
        if (this.enabled) {
            String property = environment.getProperty("max.requests.per.user.per.second");
            Assert.notNull(property, "Max requests per second not defined!");
            this.requestsPerSecond = Integer.parseInt(property);
        }
    }

    public boolean allows(String ipAddress) {
        if (!enabled) {
            return true;
        }

        if (!requests.containsKey(ipAddress)) {
            requests.put(ipAddress, new ArrayList<>());
        }

        Long now = System.currentTimeMillis();
        cleanup(ipAddress, now);
        requests.get(ipAddress).add(now);

        return requests.get(ipAddress).size() <= requestsPerSecond;
    }

    private void cleanup(String ipAddress, Long now) {
        List<Long> markedForDeletion = new ArrayList<>();
        for (Long timestamp : requests.get(ipAddress)) {
            if (now - timestamp > ONE_SECOND) {
                markedForDeletion.add(timestamp);
            }
        }
        requests.get(ipAddress).removeAll(markedForDeletion);
    }
}
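
One possible way to plug this into a Spring MVC application is a HandlerInterceptor that rejects over-limit clients with HTTP 429. The wiring below is my own sketch, not part of the note; it assumes RateLimiter is registered as a Spring bean and uses the jakarta.servlet packages of Spring Boot 3 (older versions use javax.servlet).

import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;

// Illustrative wiring only; RateLimiter is the class from the note above.
@Component
public class RateLimitInterceptor implements HandlerInterceptor {
    private final RateLimiter rateLimiter;

    public RateLimitInterceptor(RateLimiter rateLimiter) {
        this.rateLimiter = rateLimiter;
    }

    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
        if (!rateLimiter.allows(request.getRemoteAddr())) {
            response.setStatus(429); // Too Many Requests
            return false;
        }
        return true;
    }
}

Note that HashMap and ArrayList are not thread-safe, so under concurrent requests the requests map would need ConcurrentHashMap (or explicit synchronization) in a real deployment.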

31 october 2024

Integration test for the Kafka Consumer

@EmbeddedKafka
@SpringBootTest(properties = "spring.kafka.consumer.bootstrap-servers=${spring.embedded.kafka.brokers}")
public class ProductCreatedEventHandlerTest {
    @MockBean
    ProcessedEventRepository processedEventRepository;

    @Autowired
    KafkaTemplate<String, Object> kafkaTemplate;

    @SpyBean // real bean, so its method calls can still be intercepted and verified
    ProductCreatedEventHandler productCreatedEventHandler;

    @Test
    public void testHandle() throws Exception {
        // Arrange
        ProductCreatedEvent event = new ProductCreatedEvent(UUID.randomUUID().toString(), "iPhone SE", BigDecimal.valueOf(24.5), 12);
        ProducerRecord<String, Object> record = new ProducerRecord<>(PRODUCT_CREATED_EVT_TOPIC, event.getProductId(), event);
        String messageId = UUID.randomUUID().toString();
        record.headers().add("messageId", messageId.getBytes());
        record.headers().add(KafkaHeaders.RECEIVED_KEY, event.getProductId().getBytes());

        ProcessedEventEntity processedEventEntity = new ProcessedEventEntity();
        when(processedEventRepository.findByMessageId(any())).thenReturn(processedEventEntity);

        // Act
        kafkaTemplate.send(record).get();

        // Assert
        ArgumentCaptor<String> messageIdCaptor = ArgumentCaptor.forClass(String.class);
        ArgumentCaptor<String> messageKeyCaptor = ArgumentCaptor.forClass(String.class);
        ArgumentCaptor<ProductCreatedEvent> eventCaptor = ArgumentCaptor.forClass(ProductCreatedEvent.class);
        verify(productCreatedEventHandler, timeout(5000).times(1))
                .handle(eventCaptor.capture(), messageIdCaptor.capture(), messageKeyCaptor.capture());
        Assertions.assertEquals(messageId, messageIdCaptor.getValue());
        Assertions.assertEquals(event.getProductId(), messageKeyCaptor.getValue());
        Assertions.assertEquals(event.getProductId(), eventCaptor.getValue().getProductId());
    }
}
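
The handler under test is not shown in the note. A hypothetical shape that matches what the test verifies (a handle(event, messageId, messageKey) method driven by a Kafka listener, with an idempotency check against ProcessedEventRepository) could look like the sketch below; the topic name, annotations and early-return logic are my assumptions.

import org.springframework.kafka.annotation.KafkaHandler;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.stereotype.Component;

// Hypothetical consumer shape; only the handle(...) signature is implied by the test above.
@Component
@KafkaListener(topics = "product-created-events-topic") // placeholder topic name
public class ProductCreatedEventHandler {
    private final ProcessedEventRepository processedEventRepository;

    public ProductCreatedEventHandler(ProcessedEventRepository processedEventRepository) {
        this.processedEventRepository = processedEventRepository;
    }

    @KafkaHandler
    public void handle(@Payload ProductCreatedEvent event,
                       @Header("messageId") String messageId,
                       @Header(KafkaHeaders.RECEIVED_KEY) String messageKey) {
        // skip messages that were already processed (idempotent consumer)
        if (processedEventRepository.findByMessageId(messageId) != null) {
            return;
        }
        // ... business logic, then persist a ProcessedEventEntity for this messageId
    }
}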

30 october 2024

Integration test for the Kafka Producer

1. Testing the service that sends Kafka messages

package com.hanul.pis.ProductsMicroservice;

import com.hanul.pis.ProductsMicroservice.rest.NewProductDto;
import com.hanul.pis.ProductsMicroservice.service.ProductService;
import com.hanul.pis.core.event.ProductCreatedEvent;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.jupiter.api.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.env.Environment;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;

import java.math.BigDecimal;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

@DirtiesContext // the test dirties the application context -> it is recreated, so each test starts with a clean state
@TestInstance(TestInstance.Lifecycle.PER_CLASS) // one instance for all test methods, for expensive setup code
@ActiveProfiles("test") // look for application-test.properties
// use an embedded Kafka cluster
@EmbeddedKafka(partitions = 3, count = 3, controlledShutdown = true)
@SpringBootTest(properties = "spring.kafka.producer.bootstrap-servers=${spring.embedded.kafka.brokers}")
public class ProductServiceImplTest {
    @Autowired
    private ProductService productService;

    @Autowired
    private EmbeddedKafkaBroker embeddedKafkaBroker;

    @Autowired
    private Environment environment;

    private KafkaMessageListenerContainer<String, ProductCreatedEvent> container;
    private BlockingQueue<ConsumerRecord<String, ProductCreatedEvent>> records = new LinkedBlockingQueue<>();

    @BeforeAll
    void setUp() {
        Map<String, Object> consumerProperties = getConsumerProperties();
        DefaultKafkaConsumerFactory<String, Object> consumerFactory = new DefaultKafkaConsumerFactory<>(consumerProperties);
        ContainerProperties containerProperties = new ContainerProperties(environment.getProperty("product-created-evt-topic-name"));
        container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
        container.setupMessageListener((MessageListener<String, ProductCreatedEvent>) records::add);
        container.start();
        ContainerTestUtils.waitForAssignment(container, embeddedKafkaBroker.getPartitionsPerTopic());
    }

    @AfterAll
    void tearDown() {
        container.stop();
    }

    @Test
    void testCreateProduct_validProduct_success() throws Exception {
        // Arrange
        NewProductDto newProductDto = new NewProductDto();
        newProductDto.setPrice(new BigDecimal(1200));
        newProductDto.setQuantity(10);
        newProductDto.setTitle("Philips monitor 40\"");

        // Act
        String productId = productService.createProduct(newProductDto);

        // Assert
        Assertions.assertNotNull(productId);
        ConsumerRecord<String, ProductCreatedEvent> message = records.poll(3, TimeUnit.SECONDS);
        Assertions.assertNotNull(message);
        Assertions.assertNotNull(message.key());
        ProductCreatedEvent event = message.value();
        Assertions.assertNotNull(event);
        Assertions.assertEquals(newProductDto.getTitle(), event.getTitle());
        Assertions.assertEquals(newProductDto.getPrice(), event.getPrice());
        Assertions.assertEquals(newProductDto.getQuantity(), event.getQuantity());
    }

    private Map<String, Object> getConsumerProperties() {
        // Brokers' port numbers will dynamically change during executions of this test
        // Therefore, they should be dynamically retrieved in configuration
        return Map.of(
                ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafkaBroker.getBrokersAsString(),
                ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
                ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class,
                ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class,
                ConsumerConfig.GROUP_ID_CONFIG, environment.getProperty("spring.kafka.consumer.group-id"),
                JsonDeserializer.TRUSTED_PACKAGES, environment.getProperty("spring.kafka.consumer.properties.spring.json.trusted.packages"),
                ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, environment.getProperty("spring.kafka.consumer.auto-offset-reset")
        );
    }
}
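
For context, the service under test publishes the event and returns the generated product id. A minimal sketch consistent with the assertions above is shown below; the class name, the topic property injection, the blocking send and the throws clause are my assumptions (only NewProductDto, ProductService and ProductCreatedEvent come from the test's imports).

import java.util.UUID;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import com.hanul.pis.ProductsMicroservice.rest.NewProductDto;
import com.hanul.pis.ProductsMicroservice.service.ProductService;
import com.hanul.pis.core.event.ProductCreatedEvent;

// Illustrative producer-side sketch, not the project's actual implementation.
@Service
public class ProductServiceImpl implements ProductService {
    private final KafkaTemplate<String, ProductCreatedEvent> kafkaTemplate;

    @Value("${product-created-evt-topic-name}") // same property the test reads
    private String topicName;

    public ProductServiceImpl(KafkaTemplate<String, ProductCreatedEvent> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    @Override // assumes the ProductService interface declares throws Exception
    public String createProduct(NewProductDto newProductDto) throws Exception {
        String productId = UUID.randomUUID().toString();
        ProductCreatedEvent event = new ProductCreatedEvent(productId,
                newProductDto.getTitle(), newProductDto.getPrice(), newProductDto.getQuantity());
        // synchronous send: block until the broker acknowledges the record
        kafkaTemplate.send(topicName, productId, event).get();
        return productId;
    }
}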

2. Testing the configuration - e.g. that the producer is idempotent

package com.hanul.pis.ProductsMicroservice;

import com.hanul.pis.core.event.ProductCreatedEvent;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.Map;

@SpringBootTest
public class IdempotentProducerItTest { // tests against the real application configuration
    @Autowired
    KafkaTemplate<String, ProductCreatedEvent> kafkaTemplate;

    @Test
    void test_enabledIdempotence() {
        // Arrange
        ProducerFactory<String, ProductCreatedEvent> producerFactory = kafkaTemplate.getProducerFactory();

        // Act
        Map<String, Object> config = producerFactory.getConfigurationProperties();

        // Assert
        Assertions.assertEquals("true", config.get(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG));
        Assertions.assertTrue("all".equalsIgnoreCase((String) config.get(ProducerConfig.ACKS_CONFIG)));
        if (config.containsKey(ProducerConfig.RETRIES_CONFIG)) {
            Assertions.assertTrue(Integer.parseInt(config.get(ProducerConfig.RETRIES_CONFIG).toString()) > 0);
        }
    }
}
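
These assertions only pass if the producer is actually configured for idempotence. One way to get there is a dedicated producer configuration class; the sketch below is my own illustration (in a Spring Boot project the same result is usually achieved through spring.kafka.producer.* properties instead).

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import com.hanul.pis.core.event.ProductCreatedEvent;

// Illustrative configuration only; the bootstrap servers value is a placeholder.
@Configuration
public class KafkaProducerConfig {

    @Bean
    public ProducerFactory<String, ProductCreatedEvent> producerFactory() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        // what the test asserts on: idempotence implies acks=all and retries > 0
        config.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        config.put(ProducerConfig.ACKS_CONFIG, "all");
        config.put(ProducerConfig.RETRIES_CONFIG, Integer.MAX_VALUE);
        return new DefaultKafkaProducerFactory<>(config);
    }

    @Bean
    public KafkaTemplate<String, ProductCreatedEvent> kafkaTemplate(ProducerFactory<String, ProductCreatedEvent> producerFactory) {
        return new KafkaTemplate<>(producerFactory);
    }
}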