Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ jobs:
restore-keys: |
${{ runner.os }}-m2
- name: Test with Maven
run: ./mvnw clean package -B -Dmaven.test.skip=false -pl fesod-common,fesod-shaded,fesod-sheet
run: ./mvnw clean package -B -Dmaven.test.skip=false -pl fesod-common,fesod-shaded,fesod-sheet,fesod-examples/fesod-sheet-examples
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@v2
if: (!cancelled())
Expand Down
57 changes: 57 additions & 0 deletions fesod-examples/fesod-sheet-examples/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -33,15 +33,72 @@
<packaging>jar</packaging>
<name>Fesod Sheet Examples</name>

<properties>
<maven.test.skip>false</maven.test.skip>
</properties>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<includes>
<include>**/*Test.java</include>
<include>**/*ITCase.java</include>
</includes>
<!--
Tests that register global converters (e.g. CustomConverterExampleITCase)
mutate shared static state. Run them in isolated JVM forks to prevent
cross-test contamination. This follows the same pattern used by
Apache Kafka and Apache Flink for tests with static side-effects.
See: https://maven.apache.org/surefire/maven-surefire-plugin/examples/fork-options-and-parallel-execution.html
-->
<forkCount>1</forkCount>
<reuseForks>false</reuseForks>
</configuration>
</plugin>
</plugins>
</build>

<dependencies>

<dependency>
<groupId>org.apache.fesod</groupId>
<artifactId>fesod-sheet</artifactId>
<version>${project.version}</version>
</dependency>

<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback-classic.version}</version>
</dependency>

<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-to-slf4j</artifactId>
</dependency>

<dependency>
<groupId>com.alibaba.fastjson2</groupId>
<artifactId>fastjson2</artifactId>
</dependency>

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>

<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.fesod.sheet.examples.advanced;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.fesod.sheet.FesodSheet;
import org.apache.fesod.sheet.context.AnalysisContext;
import org.apache.fesod.sheet.examples.advanced.converter.CustomStringStringConverter;
import org.apache.fesod.sheet.examples.advanced.data.CustomConverterData;
import org.apache.fesod.sheet.examples.util.ExampleFileUtil;
import org.apache.fesod.sheet.read.listener.ReadListener;

/**
* Demonstrates registering a custom converter at the builder level for read and write.
*
* <h2>Scenario</h2>
* <p>You need the same data transformation applied to ALL fields of a matching type,
* not just a specific annotated field. For example, adding a "Custom:" prefix to every
* string column, or encrypting/decrypting all string values.</p>
*
* <h2>Key Concepts: Per-Field vs. Global Converter Registration</h2>
* <table>
* <tr><th>Approach</th><th>Scope</th><th>How</th></tr>
* <tr>
* <td>Per-field</td>
* <td>Single field only</td>
* <td>{@code @ExcelProperty(converter = MyConverter.class)}</td>
* </tr>
* <tr>
* <td>Global (this example)</td>
* <td>All fields matching Java type + Excel type</td>
* <td>{@code .registerConverter(new MyConverter())} on the builder</td>
* </tr>
* </table>
*
* <h2>How It Works</h2>
* <ol>
* <li><b>Write:</b> The {@link CustomStringStringConverter} is registered on the write builder.
* During write, every {@code String} field is transformed (prefixed with "Custom:").</li>
* <li><b>Read:</b> The same converter is registered on the read builder.
* During read, every string cell is transformed back through the converter.</li>
* </ol>
*
* <h2>Converter Resolution Priority</h2>
* <pre>
* 1. Field-level converter (@ExcelProperty(converter = ...)) ← highest
* 2. Builder-level converter (.registerConverter(...)) ← this example
* 3. Built-in default converter ← lowest
* </pre>
*
* <h2>Related Examples</h2>
* <ul>
* <li>{@link org.apache.fesod.sheet.examples.read.ConverterReadExample} — Per-field converter via annotation.</li>
* </ul>
*
* @see CustomStringStringConverter
* @see org.apache.fesod.sheet.converters.Converter
*/
@Slf4j
public class CustomConverterExample {

    public static void main(String[] args) {
        String fileName = ExampleFileUtil.getTempPath("customConverter" + System.currentTimeMillis() + ".xlsx");
        customConverterWrite(fileName);
        customConverterRead(fileName);
    }

    /**
     * Writes the sample rows with a converter registered globally on the write builder.
     *
     * <p>Every {@code String} field is routed through {@link CustomStringStringConverter},
     * so a value such as "String0" is stored as "Custom:String0" in the sheet.</p>
     *
     * @param fileName path of the xlsx file to create
     */
    public static void customConverterWrite(String fileName) {
        FesodSheet.write(fileName, CustomConverterData.class)
                .registerConverter(new CustomStringStringConverter())
                .sheet("CustomConverter")
                .doWrite(data());
        log.info("Successfully wrote file with custom converter: {}", fileName);
    }

    /**
     * Reads the previously written file with the same converter registered on the
     * read builder.
     *
     * <p>The converter's {@code convertToJavaData()} runs while cells are parsed,
     * before each row reaches the listener.</p>
     *
     * @param fileName path of the xlsx file to read
     */
    public static void customConverterRead(String fileName) {
        // Listener held in a local variable for readability; it logs each row as parsed.
        ReadListener<CustomConverterData> listener = new ReadListener<CustomConverterData>() {
            @Override
            public void invoke(CustomConverterData data, AnalysisContext context) {
                log.info("Read data with custom converter: {}", data);
            }

            @Override
            public void doAfterAllAnalysed(AnalysisContext context) {
                log.info("Custom converter read completed");
            }
        };
        FesodSheet.read(fileName, CustomConverterData.class, listener)
                .registerConverter(new CustomStringStringConverter())
                .sheet()
                .doRead();
    }

    /** Builds the ten sample rows; the string fields are what the converter prefixes. */
    private static List<CustomConverterData> data() {
        List<CustomConverterData> rows = new ArrayList<>();
        int index = 0;
        while (index < 10) {
            CustomConverterData row = new CustomConverterData();
            row.setString("String" + index);
            row.setDate(new Date());
            row.setDoubleData(0.56);
            rows.add(row);
            index++;
        }
        return rows;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.fesod.sheet.examples.advanced;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.fesod.sheet.ExcelWriter;
import org.apache.fesod.sheet.FesodSheet;
import org.apache.fesod.sheet.examples.util.ExampleFileUtil;
import org.apache.fesod.sheet.examples.write.data.DemoData;
import org.apache.fesod.sheet.util.FileUtils;
import org.apache.fesod.sheet.write.handler.WorkbookWriteHandler;
import org.apache.fesod.sheet.write.handler.context.WorkbookWriteHandlerContext;
import org.apache.fesod.sheet.write.metadata.WriteSheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;

/**
* Demonstrates writing very large Excel files (100,000+ rows) with memory optimization.
*
* <h2>Scenario</h2>
* <p>You need to export a large dataset (e.g., database dump, log analysis) that would
* exhaust memory if all rows were held at once. Fesod uses Apache POI's streaming
* API (SXSSF) internally, but temporary XML files can consume significant disk space.</p>
*
* <h2>Key Optimization: Temporary File Compression</h2>
* <p>When POI writes large XLSX files, it creates temporary XML files on disk
* (one per sheet). These can be several times larger than the final file.
* Enabling compression via {@code setCompressTempFiles(true)} significantly reduces
* disk usage at the cost of slightly more CPU.</p>
*
* <h2>Architecture</h2>
* <pre>
* Data (in memory, batched) Fesod POI/SXSSF
* │ │ │
* ├─ 100 rows batch ───────────▶ write() ──────▶ temp XML (compressed)
* ├─ 100 rows batch ───────────▶ write() ──────▶ temp XML (append)
* │ ... (1000 batches) │ │
* └─ close() ──────────────────▶ finalize ─────▶ final .xlsx
* </pre>
*
* <h2>Performance Tips</h2>
* <ul>
* <li>Use {@code ExcelWriter} (try-with-resources) for batch writing instead of
* loading all data with {@code doWrite()}.</li>
* <li>Enable temp file compression for disk-constrained environments.</li>
* <li>Tune batch size (100 rows here) based on your row width and available memory.</li>
* <li>Monitor temp directory size: {@code FileUtils.getPoiFilesPath()}.</li>
* </ul>
*
* <h2>Expected Result</h2>
* <p>Writes 100,000 rows (1000 batches x 100 rows) to a single sheet without
* OutOfMemoryError, using compressed temp files on disk.</p>
*
* <h2>Related Examples</h2>
* <ul>
* <li>{@link org.apache.fesod.sheet.examples.write.BasicWriteExample} — Simple small-file write.</li>
* </ul>
*
* @see ExcelWriter
* @see org.apache.poi.xssf.streaming.SXSSFWorkbook#setCompressTempFiles(boolean)
*/
@Slf4j
public class LargeFileWriteExample {

    public static void main(String[] args) {
        compressedTemporaryFile();
    }

    /**
     * Writes 100,000 rows in batches while keeping POI's temporary files compressed.
     *
     * <p>A {@link WorkbookWriteHandler} grabs the underlying POI workbook right after
     * creation and, when it is a streaming {@link SXSSFWorkbook}, turns on temp-file
     * compression. Rows are then pushed through the {@link ExcelWriter} in 1,000
     * batches of 100 rows each.</p>
     */
    public static void compressedTemporaryFile() {
        log.info("Temporary XML files are stored at: {}", FileUtils.getPoiFilesPath());
        String fileName = ExampleFileUtil.getTempPath("largeFile" + System.currentTimeMillis() + ".xlsx");

        // Handler held in a local: enables compressed temp files as soon as the
        // POI workbook exists (only applies when the workbook is streaming SXSSF).
        WorkbookWriteHandler compressionHandler = new WorkbookWriteHandler() {
            @Override
            public void afterWorkbookCreate(WorkbookWriteHandlerContext context) {
                Workbook workbook = context.getWriteWorkbookHolder().getWorkbook();
                if (workbook instanceof SXSSFWorkbook) {
                    // Enable temporary file compression.
                    ((SXSSFWorkbook) workbook).setCompressTempFiles(true);
                }
            }
        };

        try (ExcelWriter writer = FesodSheet.write(fileName, DemoData.class)
                .registerWriteHandler(compressionHandler)
                .build()) {
            WriteSheet sheet = FesodSheet.writerSheet("Template").build();
            // 1,000 batches of 100 rows each = 100,000 rows total.
            int batch = 0;
            while (batch < 1000) {
                writer.write(data(), sheet);
                batch++;
            }
        }
        log.info("Successfully wrote large file: {}", fileName);
    }

    /** Produces one 100-row batch; a fresh Date is stamped on every call. */
    private static List<DemoData> data() {
        List<DemoData> batch = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            DemoData row = new DemoData();
            row.setString("String" + i);
            row.setDate(new Date());
            row.setDoubleData(0.56);
            batch.add(row);
        }
        return batch;
    }
}
Loading
Loading