The `ItemWriter<O>` trait defines how to write batches of items to any destination. Writers receive chunks of items, enabling efficient batch operations.
```rust
pub trait ItemWriter<O> {
    /// Writes a batch of items
    fn write(&self, items: &[O]) -> Result<(), BatchError>;

    /// Opens the writer (called once at start)
    fn open(&self) -> Result<(), BatchError> {
        Ok(())
    }

    /// Closes the writer (called once at end)
    fn close(&self) -> Result<(), BatchError> {
        Ok(())
    }

    /// Flushes any buffered data
    fn flush(&self) -> Result<(), BatchError> {
        Ok(())
    }
}

pub type ItemWriterResult = Result<(), BatchError>;
```

| Method | Called When | Purpose |
|---|---|---|
| `open()` | Before first write | Initialize resources (files, connections) |
| `write()` | For each chunk | Write a batch of items |
| `flush()` | After each write | Flush buffers to ensure data persistence |
| `close()` | After all writes | Release resources and finalize output |
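To make the call order concrete, here is an illustrative driver loop. The `drive_writer` helper and its pre-chunked input are not part of the library; a real chunk-oriented step manages chunking and error handling itself:

```rust
use spring_batch_rs::core::item::ItemWriter;
use spring_batch_rs::error::BatchError;

// Illustrative only: drives any ItemWriter through its full lifecycle.
fn drive_writer<O, W: ItemWriter<O>>(writer: &W, chunks: &[Vec<O>]) -> Result<(), BatchError> {
    writer.open()?; // 1. initialize resources once
    for chunk in chunks {
        writer.write(chunk)?; // 2. write each batch of items
        writer.flush()?; // 3. persist buffered data after each write
    }
    writer.close() // 4. finalize output and release resources
}
```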
Spring Batch RS provides 9 built-in writer implementations:
| Writer | Feature Flag | Destination | Description |
|---|---|---|---|
| `CsvItemWriter<O, W>` | `csv` | CSV files | Writes CSV records with headers |
| `JsonItemWriter<O, W>` | `json` | JSON files | Writes JSON arrays |
| `XmlItemWriter<O, W>` | `xml` | XML files | Writes XML documents |
| `PostgresItemWriter<O>` | `rdbc-postgres` | PostgreSQL | Bulk inserts to PostgreSQL |
| `MysqlItemWriter<O>` | `rdbc-mysql` | MySQL/MariaDB | Bulk inserts to MySQL |
| `SqliteItemWriter<O>` | `rdbc-sqlite` | SQLite | Bulk inserts to SQLite |
| `MongodbItemWriter<O>` | `mongodb` | MongoDB | Bulk inserts to MongoDB |
| `OrmItemWriter<O>` | `orm` | SeaORM | ORM-based database writing |
| `LoggerWriter` | built-in | Logs | Debug output via logging |
```rust
pub struct CsvItemWriterBuilder<O: Serialize, W: Write> { /* ... */ }
```

| Method | Type | Default | Description |
|---|---|---|---|
| `has_headers(bool)` | `bool` | `true` | Write a header row |
| `delimiter(u8)` | `u8` | `b','` | Field delimiter character |
| `from_path(&str)` | - | - | Write to a file path |
| `from_writer(W)` | - | - | Write to any `Write` destination |
```rust
use spring_batch_rs::item::csv::CsvItemWriterBuilder;
use serde::Serialize;

#[derive(Serialize)]
struct Product {
    id: u32,
    name: String,
    price: f64,
}

let writer = CsvItemWriterBuilder::new()
    .has_headers(true)
    .delimiter(b',')
    .from_path("products.csv")?;
```

To write to an in-memory buffer (or any other `Write` destination):

```rust
use std::io::Cursor;

let buffer = Cursor::new(Vec::new());

let writer = CsvItemWriterBuilder::<Product>::new()
    .has_headers(true)
    .from_writer(buffer);
```

To use a different field delimiter:

```rust
let writer = CsvItemWriterBuilder::new()
    .has_headers(true)
    .delimiter(b';') // Semicolon-separated
    .from_path("data.csv")?;
```

```rust
pub struct JsonItemWriterBuilder<O: Serialize, W: Write> { /* ... */ }
```

| Method | Type | Default | Description |
|---|---|---|---|
| `pretty_formatter(bool)` | `bool` | `false` | Enable pretty-printing |
| `from_path(&str)` | - | - | Write to a file path |
| `from_writer(W)` | - | - | Write to any `Write` destination |
```rust
use spring_batch_rs::item::json::JsonItemWriterBuilder;
use serde::Serialize;

#[derive(Serialize)]
struct User {
    id: u32,
    name: String,
    email: String,
}

let writer = JsonItemWriterBuilder::<User>::new()
    .pretty_formatter(true) // Indented, readable output
    .from_path("users.json")?;
```

For minified output:

```rust
let writer = JsonItemWriterBuilder::<User>::new()
    .pretty_formatter(false) // Minified output
    .from_path("users.json")?;
```

```rust
pub struct XmlItemWriterBuilder<O: Serialize, W: Write> { /* ... */ }
```

| Method | Type | Default | Description |
|---|---|---|---|
| `root_tag(&str)` | `&str` | `"root"` | Root element name |
| `item_tag(&str)` | `&str` | required | Item element name |
| `from_path(&str)` | - | - | Write to a file path |
| `from_writer(W)` | - | - | Write to any `Write` destination |
```rust
use spring_batch_rs::item::xml::XmlItemWriterBuilder;
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename = "vehicle")]
struct Vehicle {
    #[serde(rename = "@type")]
    vehicle_type: String,
    make: String,
    model: String,
    year: i32,
}

let writer = XmlItemWriterBuilder::new()
    .root_tag("vehicles")
    .item_tag("vehicle")
    .from_path("output.xml")?;
```

Output:

```xml
<?xml version="1.0" encoding="UTF-8"?>
<vehicles>
  <vehicle type="car">
    <make>Toyota</make>
    <model>Camry</model>
    <year>2023</year>
  </vehicle>
</vehicles>
```

```rust
pub struct PostgresItemWriterBuilder<O> { /* ... */ }
```

| Method | Type | Description |
|---|---|---|
| `pool(PgPool)` | `PgPool` | PostgreSQL connection pool |
| `table(&str)` | `&str` | Target table name |
| `binder(Fn)` | `Fn(&mut QueryBuilder, &O)` | Function to bind item fields |
```rust
use spring_batch_rs::item::rdbc::postgres::PostgresItemWriterBuilder;
use sqlx::PgPool;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Deserialize, Serialize)]
struct Person {
    first_name: String,
    last_name: String,
    email: String,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let pool = PgPool::connect("postgres://user:pass@localhost/db").await?;

    let writer = PostgresItemWriterBuilder::new()
        .pool(pool)
        .table("persons")
        .binder(|query, person: &Person| {
            query.push_values([person], |mut b, p| {
                b.push_bind(&p.first_name)
                    .push_bind(&p.last_name)
                    .push_bind(&p.email);
            });
        })
        .build();

    Ok(())
}
```

The MySQL writer works the same way as the PostgreSQL writer; use `MySqlPool` instead:
```rust
use spring_batch_rs::item::rdbc::mysql::MysqlItemWriterBuilder;
use sqlx::MySqlPool;

let pool = MySqlPool::connect("mysql://user:pass@localhost/db").await?;

let writer = MysqlItemWriterBuilder::new()
    .pool(pool)
    .table("persons")
    .binder(|query, person: &Person| {
        query.push_values([person], |mut b, p| {
            b.push_bind(&p.first_name)
                .push_bind(&p.last_name)
                .push_bind(&p.email);
        });
    })
    .build();
```

The SQLite writer is likewise the same; use `SqlitePool`:
```rust
use spring_batch_rs::item::rdbc::sqlite::SqliteItemWriterBuilder;
use sqlx::SqlitePool;

let pool = SqlitePool::connect("sqlite::memory:").await?;

let writer = SqliteItemWriterBuilder::new()
    .pool(pool)
    .table("persons")
    .binder(|query, person: &Person| {
        query.push_values([person], |mut b, p| {
            b.push_bind(&p.first_name)
                .push_bind(&p.last_name)
                .push_bind(&p.email);
        });
    })
    .build();
```

```rust
pub struct MongodbItemWriterBuilder<O> { /* ... */ }
```

| Method | Type | Description |
|---|---|---|
| `collection(&Collection<O>)` | `&Collection<O>` | MongoDB collection |
```rust
use spring_batch_rs::item::mongodb::MongodbItemWriterBuilder;
use mongodb::{sync::Client, bson::doc};
use serde::{Deserialize, Serialize};

#[derive(Debug, Deserialize, Serialize, Clone)]
struct Book {
    title: String,
    author: String,
    isbn: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::with_uri_str("mongodb://localhost:27017")?;
    let db = client.database("library");
    let collection = db.collection::<Book>("books");

    let writer = MongodbItemWriterBuilder::new()
        .collection(&collection)
        .build();
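
    // Hypothetical usage sketch (not from the original example): one call to
    // the `ItemWriter` trait's `write` performs a bulk insert of the chunk.
    use spring_batch_rs::core::item::ItemWriter; // bring the trait method into scope
    let books = vec![Book {
        title: "Example Book".to_string(),
        author: "Jane Doe".to_string(),
        isbn: "000-0000000000".to_string(),
    }];
    writer.write(&books)?; // `?` assumes BatchError converts into Box<dyn Error>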
    Ok(())
}
```

Writes entities using SeaORM; your entity's `ActiveModel` must implement SeaORM's `ActiveModelTrait`.
```rust
use spring_batch_rs::item::orm::OrmItemWriterBuilder;
use sea_orm::{Database, DatabaseConnection};

// Assuming you have a SeaORM entity
use entity::person::{Entity as PersonEntity, ActiveModel};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let db: DatabaseConnection = Database::connect("sqlite::memory:").await?;

    let writer = OrmItemWriterBuilder::new()
        .entity(PersonEntity)
        .connection(&db)
        .build();

    Ok(())
}
```

Writes items to logs for debugging purposes.
```rust
pub struct LoggerItemWriterBuilder { /* ... */ }
```

| Method | Type | Default | Description |
|---|---|---|---|
| `log_level(Level)` | `log::Level` | `Info` | Logging level |
```rust
use spring_batch_rs::item::logger::LoggerItemWriterBuilder;
use log::Level;

// Log at INFO level
let writer = LoggerItemWriterBuilder::new()
    .log_level(Level::Info)
    .build();

// Log at DEBUG level
let writer = LoggerItemWriterBuilder::new()
    .log_level(Level::Debug)
    .build();
```

You can implement `ItemWriter` for any destination:
```rust
use spring_batch_rs::core::item::{ItemWriter, ItemWriterResult};
use spring_batch_rs::error::BatchError;
use std::sync::Mutex;

struct MyCustomWriter {
    data: Mutex<Vec<String>>,
}

impl ItemWriter<String> for MyCustomWriter {
    fn write(&self, items: &[String]) -> ItemWriterResult {
        let mut data = self.data.lock().unwrap();
        data.extend_from_slice(items);
        Ok(())
    }

    fn open(&self) -> ItemWriterResult {
        println!("Opening writer");
        Ok(())
    }

    fn close(&self) -> ItemWriterResult {
        println!("Closing writer, wrote {} items", self.data.lock().unwrap().len());
        Ok(())
    }

    fn flush(&self) -> ItemWriterResult {
        println!("Flushing writer");
        Ok(())
    }
}
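
// A hypothetical driver (not part of the original example) exercising the
// writer end to end through the trait methods.
fn run_custom_writer() -> ItemWriterResult {
    let writer = MyCustomWriter::new(); // constructor defined just below
    writer.open()?;
    writer.write(&["a".to_string(), "b".to_string()])?;
    writer.flush()?;
    writer.close()
}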
impl MyCustomWriter {
    fn new() -> Self {
        Self {
            data: Mutex::new(Vec::new()),
        }
    }
}
```

Writers can implement transactional behavior:
```rust
use std::sync::Mutex;

struct TransactionalWriter {
    buffer: Mutex<Vec<String>>,
    committed: Mutex<Vec<String>>,
}

impl ItemWriter<String> for TransactionalWriter {
    fn write(&self, items: &[String]) -> ItemWriterResult {
        // Write to buffer (transaction)
        let mut buffer = self.buffer.lock().unwrap();
        buffer.extend_from_slice(items);
        Ok(())
    }

    fn flush(&self) -> ItemWriterResult {
        // Commit transaction
        let mut buffer = self.buffer.lock().unwrap();
        let mut committed = self.committed.lock().unwrap();
        committed.append(&mut buffer);
        Ok(())
    }
    fn close(&self) -> ItemWriterResult {
        // Final flush on close
        self.flush()
    }
}
```

Best practices for writer implementations:

- **Batch Operations**: Use bulk insert/write APIs when available for better performance.
- **Buffer Management**: Implement `flush()` to ensure data is persisted, especially for buffered writers.
- **Resource Cleanup**: Always release resources in `close()`, even if errors occurred.
- **Error Context**: Return descriptive `BatchError::ItemWriter` errors with details about what failed (see the sketch below).
- **Buffered I/O**: Use buffered writers (e.g., `BufWriter`) for file-based writers (see the sketch below).
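
A minimal sketch combining the last two practices, assuming `BatchError::ItemWriter` carries a `String` message (check the crate's error type for the exact shape); the `FileLineWriter` type itself is hypothetical:

```rust
use spring_batch_rs::core::item::{ItemWriter, ItemWriterResult};
use spring_batch_rs::error::BatchError;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::sync::Mutex;

// Hypothetical file-based writer that buffers output through BufWriter.
struct FileLineWriter {
    out: Mutex<BufWriter<File>>,
}

impl ItemWriter<String> for FileLineWriter {
    fn write(&self, items: &[String]) -> ItemWriterResult {
        let mut out = self.out.lock().unwrap();
        for item in items {
            // Wrap I/O failures in a descriptive error with context.
            writeln!(out, "{}", item)
                .map_err(|e| BatchError::ItemWriter(format!("write failed: {}", e)))?;
        }
        Ok(())
    }

    fn flush(&self) -> ItemWriterResult {
        // Push buffered data to disk so a crash cannot lose flushed chunks.
        self.out
            .lock()
            .unwrap()
            .flush()
            .map_err(|e| BatchError::ItemWriter(format!("flush failed: {}", e)))
    }

    fn close(&self) -> ItemWriterResult {
        // Final flush guarantees all buffered lines are persisted.
        self.flush()
    }
}
```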