Skip to content

ItemWriter API

This content is not yet available in your language.

The ItemWriter<O> trait defines how to write batches of items to any destination. Writers receive chunks of items for efficient batch operations.

/// Defines how a batch of items of type `O` is written to a destination.
///
/// Writers receive whole chunks of items so implementations can perform
/// efficient bulk operations. `open`, `close`, and `flush` have no-op
/// defaults for writers that need no lifecycle handling.
pub trait ItemWriter<O> {
    /// Writes a batch of items to the destination.
    fn write(&self, items: &[O]) -> Result<(), BatchError>;

    /// Opens the writer; called once before the first write.
    fn open(&self) -> Result<(), BatchError> {
        Ok(())
    }

    /// Closes the writer; called once after the last write.
    fn close(&self) -> Result<(), BatchError> {
        Ok(())
    }

    /// Flushes any buffered data to the destination.
    fn flush(&self) -> Result<(), BatchError> {
        Ok(())
    }
}

/// Convenience alias for the result type shared by all writer methods.
pub type ItemWriterResult = Result<(), BatchError>;
| Method    | Called When        | Purpose                                   |
| --------- | ------------------ | ----------------------------------------- |
| `open()`  | Before first write | Initialize resources (files, connections) |
| `write()` | For each chunk     | Write batch of items                      |
| `flush()` | After each write   | Flush buffers to ensure data persistence  |
| `close()` | After all writes   | Release resources and finalize output     |

Spring Batch RS provides 9 built-in writer implementations:

| Writer                    | Feature Flag    | Destination   | Description                     |
| ------------------------- | --------------- | ------------- | ------------------------------- |
| `CsvItemWriter<O, W>`     | `csv`           | CSV files     | Writes CSV records with headers |
| `JsonItemWriter<O, W>`    | `json`          | JSON files    | Writes JSON arrays              |
| `XmlItemWriter<O, W>`     | `xml`           | XML files     | Writes XML documents            |
| `PostgresItemWriter<O>`   | `rdbc-postgres` | PostgreSQL    | Bulk inserts to PostgreSQL      |
| `MysqlItemWriter<O>`      | `rdbc-mysql`    | MySQL/MariaDB | Bulk inserts to MySQL           |
| `SqliteItemWriter<O>`     | `rdbc-sqlite`   | SQLite        | Bulk inserts to SQLite          |
| `MongodbItemWriter<O>`    | `mongodb`       | MongoDB       | Bulk inserts to MongoDB         |
| `OrmItemWriter<O>`        | `orm`           | SeaORM        | ORM-based database writing      |
| `LoggerWriter`            | built-in        | Logs          | Debug output via logging        |

/// Builder for `CsvItemWriter`; `O` is the serializable record type and
/// `W` the underlying `Write` destination.
pub struct CsvItemWriterBuilder<O: Serialize, W: Write> { /* ... */ }
| Method              | Type   | Default  | Description                    |
| ------------------- | ------ | -------- | ------------------------------ |
| `has_headers(bool)` | `bool` | `true`   | Write header row               |
| `delimiter(u8)`     | `u8`   | `b','`   | Field delimiter character      |
| `from_path(&str)`   | -      | -        | Write to file path             |
| `from_writer(W)`    | -      | -        | Write to any `Write` destination |
use spring_batch_rs::item::csv::CsvItemWriterBuilder;
use serde::Serialize;
#[derive(Serialize)]
struct Product {
id: u32,
name: String,
price: f64,
}
let writer = CsvItemWriterBuilder::new()
.has_headers(true)
.delimiter(b',')
.from_path("products.csv")?;

/// Builder for `JsonItemWriter`; `O` is the serializable item type and
/// `W` the underlying `Write` destination.
pub struct JsonItemWriterBuilder<O: Serialize, W: Write> { /* ... */ }
| Method                   | Type   | Default | Description                      |
| ------------------------ | ------ | ------- | -------------------------------- |
| `pretty_formatter(bool)` | `bool` | `false` | Enable pretty-printing           |
| `from_path(&str)`        | -      | -       | Write to file path               |
| `from_writer(W)`         | -      | -       | Write to any `Write` destination |
use spring_batch_rs::item::json::JsonItemWriterBuilder;
use serde::Serialize;
#[derive(Serialize)]
struct User {
id: u32,
name: String,
email: String,
}
let writer = JsonItemWriterBuilder::<User>::new()
.pretty_formatter(true) // Indented, readable output
.from_path("users.json")?;

/// Builder for `XmlItemWriter`; `O` is the serializable item type and
/// `W` the underlying `Write` destination.
pub struct XmlItemWriterBuilder<O: Serialize, W: Write> { /* ... */ }
| Method            | Type   | Default    | Description                      |
| ----------------- | ------ | ---------- | -------------------------------- |
| `root_tag(&str)`  | `&str` | `"root"`   | Root element name                |
| `item_tag(&str)`  | `&str` | required   | Item element name                |
| `from_path(&str)` | -      | -          | Write to file path               |
| `from_writer(W)`  | -      | -          | Write to any `Write` destination |
use spring_batch_rs::item::xml::XmlItemWriterBuilder;
use serde::Serialize;
#[derive(Serialize)]
#[serde(rename = "vehicle")]
struct Vehicle {
#[serde(rename = "@type")]
vehicle_type: String,
make: String,
model: String,
year: i32,
}
let writer = XmlItemWriterBuilder::new()
.root_tag("vehicles")
.item_tag("vehicle")
.from_path("output.xml")?;

Output:

<?xml version="1.0" encoding="UTF-8"?>
<vehicles>
<vehicle type="car">
<make>Toyota</make>
<model>Camry</model>
<year>2023</year>
</vehicle>
</vehicles>

/// Builder for `PostgresItemWriter`; `O` is the item type inserted into
/// the target table.
pub struct PostgresItemWriterBuilder<O> { /* ... */ }
| Method           | Type                         | Description                  |
| ---------------- | ---------------------------- | ---------------------------- |
| `pool(PgPool)`   | `PgPool`                     | PostgreSQL connection pool   |
| `table(&str)`    | `&str`                       | Target table name            |
| `binder(Fn)`     | `Fn(&mut QueryBuilder, &O)`  | Function to bind item fields |
use spring_batch_rs::item::rdbc::postgres::PostgresItemWriterBuilder;
use sqlx::PgPool;
use serde::{Deserialize, Serialize};

/// Row type inserted into the `persons` table.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct Person {
    first_name: String,
    last_name: String,
    email: String,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect once and hand the pool to the writer.
    let pool = PgPool::connect("postgres://user:pass@localhost/db").await?;

    let writer = PostgresItemWriterBuilder::new()
        .pool(pool)
        .table("persons")
        // Bind each item's fields onto the INSERT statement.
        .binder(|builder, item: &Person| {
            builder.push_values([item], |mut row, p| {
                row.push_bind(&p.first_name)
                    .push_bind(&p.last_name)
                    .push_bind(&p.email);
            });
        })
        .build();

    Ok(())
}

Same as PostgreSQL writer, but use MySqlPool:

use spring_batch_rs::item::rdbc::mysql::MysqlItemWriterBuilder;
use sqlx::MySqlPool;
let pool = MySqlPool::connect("mysql://user:pass@localhost/db").await?;
let writer = MysqlItemWriterBuilder::new()
.pool(pool)
.table("persons")
.binder(|query, person: &Person| {
query.push_values([person], |mut b, p| {
b.push_bind(&p.first_name)
.push_bind(&p.last_name)
.push_bind(&p.email);
});
})
.build();

Same as PostgreSQL writer, but use SqlitePool:

use spring_batch_rs::item::rdbc::sqlite::SqliteItemWriterBuilder;
use sqlx::SqlitePool;
let pool = SqlitePool::connect("sqlite::memory:").await?;
let writer = SqliteItemWriterBuilder::new()
.pool(pool)
.table("persons")
.binder(|query, person: &Person| {
query.push_values([person], |mut b, p| {
b.push_bind(&p.first_name)
.push_bind(&p.last_name)
.push_bind(&p.email);
});
})
.build();

/// Builder for `MongodbItemWriter`; `O` is the document type inserted
/// into the target collection.
pub struct MongodbItemWriterBuilder<O> { /* ... */ }
| Method                       | Type             | Description        |
| ---------------------------- | ---------------- | ------------------ |
| `collection(&Collection<O>)` | `&Collection<O>` | MongoDB collection |
use spring_batch_rs::item::mongodb::MongodbItemWriterBuilder;
use mongodb::{sync::Client, bson::doc};
use serde::{Deserialize, Serialize};

/// Document type stored in the `books` collection.
#[derive(Debug, Deserialize, Serialize, Clone)]
struct Book {
    title: String,
    author: String,
    isbn: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Synchronous MongoDB client; the writer bulk-inserts through it.
    let client = Client::with_uri_str("mongodb://localhost:27017")?;
    let books = client.database("library").collection::<Book>("books");

    let writer = MongodbItemWriterBuilder::new()
        .collection(&books)
        .build();

    Ok(())
}

Writes entities using SeaORM. Your entity must implement SeaORM’s ActiveModelTrait.

use spring_batch_rs::item::orm::OrmItemWriterBuilder;
use sea_orm::{Database, DatabaseConnection};
// Assuming a SeaORM entity generated elsewhere in the project.
use entity::person::{Entity as PersonEntity, ActiveModel};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // An in-memory SQLite database keeps the example self-contained.
    let connection: DatabaseConnection = Database::connect("sqlite::memory:").await?;

    let writer = OrmItemWriterBuilder::new()
        .entity(PersonEntity)
        .connection(&connection)
        .build();

    Ok(())
}

Writes items to logs for debugging purposes.

/// Builder for the built-in logging writer used for debug output.
pub struct LoggerItemWriterBuilder { /* ... */ }
| Method             | Type         | Default | Description   |
| ------------------ | ------------ | ------- | ------------- |
| `log_level(Level)` | `log::Level` | `Info`  | Logging level |
use spring_batch_rs::item::logger::LoggerItemWriterBuilder;
use log::Level;
// Log at INFO level
let writer = LoggerItemWriterBuilder::new()
.log_level(Level::Info)
.build();
// Log at DEBUG level
let writer = LoggerItemWriterBuilder::new()
.log_level(Level::Debug)
.build();

You can implement ItemWriter for any destination:

use spring_batch_rs::core::item::{ItemWriter, ItemWriterResult};
use spring_batch_rs::error::BatchError;
use std::sync::Mutex;

/// Example in-memory writer: collects every written item into a
/// mutex-guarded vector.
struct MyCustomWriter {
    // Interior mutability lets `write(&self)` mutate shared state.
    data: Mutex<Vec<String>>,
}

impl MyCustomWriter {
    /// Creates a writer with an empty backing buffer.
    fn new() -> Self {
        Self {
            data: Mutex::new(Vec::new()),
        }
    }
}

impl ItemWriter<String> for MyCustomWriter {
    /// Appends the chunk to the in-memory buffer.
    fn write(&self, items: &[String]) -> ItemWriterResult {
        self.data.lock().unwrap().extend_from_slice(items);
        Ok(())
    }

    /// Called once before the first chunk.
    fn open(&self) -> ItemWriterResult {
        println!("Opening writer");
        Ok(())
    }

    /// Called once after the last chunk; reports the total item count.
    fn close(&self) -> ItemWriterResult {
        let total = self.data.lock().unwrap().len();
        println!("Closing writer, wrote {} items", total);
        Ok(())
    }

    /// No buffering beyond the vector itself, so this only logs.
    fn flush(&self) -> ItemWriterResult {
        println!("Flushing writer");
        Ok(())
    }
}

Writers can implement transactional behavior:

use std::sync::Mutex;

/// Example writer with commit semantics: `write` stages items in a
/// buffer and `flush` moves them into the committed store.
struct TransactionalWriter {
    // Uncommitted items staged by `write`.
    buffer: Mutex<Vec<String>>,
    // Items made durable by `flush`.
    committed: Mutex<Vec<String>>,
}

impl ItemWriter<String> for TransactionalWriter {
    /// Stages a chunk in the transaction buffer.
    fn write(&self, items: &[String]) -> ItemWriterResult {
        self.buffer.lock().unwrap().extend_from_slice(items);
        Ok(())
    }

    /// Commits the transaction by draining the buffer into `committed`.
    /// Locks are taken in buffer-then-committed order.
    fn flush(&self) -> ItemWriterResult {
        let mut staged = self.buffer.lock().unwrap();
        self.committed.lock().unwrap().append(&mut staged);
        Ok(())
    }

    /// Ensures any staged items are committed before shutdown.
    fn close(&self) -> ItemWriterResult {
        self.flush()
    }
}

Batch Operations

Use bulk insert/write APIs when available for better performance

Buffer Management

Implement flush() to ensure data is persisted, especially for buffered writers

Resource Cleanup

Always release resources in close() even if errors occurred

Error Context

Return descriptive BatchError::ItemWriter errors with details about what failed

  • Use buffered I/O (BufWriter) for file-based writers
  • Consider chunk size when writing large files
  • Use appropriate buffer sizes (8KB-64KB typical)