import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hadoop.HadoopCatalog;
import org.apache.iceberg.types.Types;
import java.util.HashMap;
import java.util.Map;

public class IcebergExample {
    public static void main(String[] args) {
        // Define the table schema: three optional columns with unique field IDs
        Schema schema = new Schema(
            Types.NestedField.optional(1, "id", Types.IntegerType.get()),
            Types.NestedField.optional(2, "name", Types.StringType.get()),
            Types.NestedField.optional(3, "age", Types.IntegerType.get())
        );

        // Partition by age so data files can later be appended per age partition
        PartitionSpec spec = PartitionSpec.builderFor(schema).identity("age").build();

        // Point a Hadoop catalog at the warehouse location
        Map<String, String> properties = new HashMap<>();
        properties.put(CatalogProperties.WAREHOUSE_LOCATION, "hdfs://localhost:9000/warehouse");
        HadoopCatalog catalog = new HadoopCatalog();
        catalog.setConf(new Configuration());
        catalog.initialize("hadoop", properties);

        // Create the table as mydb.mytable
        TableIdentifier tableIdentifier = TableIdentifier.of("mydb", "mytable");
        Table table = catalog.createTable(tableIdentifier, schema, spec);
    }
}
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DataFiles;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.Table;
import org.apache.iceberg.Transaction;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hadoop.HadoopCatalog;
import java.util.HashMap;
import java.util.Map;

public class IcebergExample {
    public static void main(String[] args) {
        // Load the table created in the previous example
        Map<String, String> properties = new HashMap<>();
        properties.put(CatalogProperties.WAREHOUSE_LOCATION, "hdfs://localhost:9000/warehouse");
        HadoopCatalog catalog = new HadoopCatalog();
        catalog.setConf(new Configuration());
        catalog.initialize("hadoop", properties);
        Table table = catalog.loadTable(TableIdentifier.of("mydb", "mytable"));

        // Describe an existing Parquet file in the age=30 partition
        // (size and record count are placeholder metadata for the example)
        DataFile file = DataFiles.builder(table.spec())
            .withPath("/path/to/data.parquet")
            .withFormat(FileFormat.PARQUET)
            .withFileSizeInBytes(1024)
            .withRecordCount(100)
            .withPartitionPath("age=30")
            .build();

        // Stage the append, then commit the transaction to publish a new snapshot
        Transaction transaction = table.newTransaction();
        transaction.newAppend()
            .appendFile(file)
            .commit();
        transaction.commitTransaction();
    }
}
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.data.IcebergGenerics;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.hadoop.HadoopCatalog;
import org.apache.iceberg.io.CloseableIterable;
import java.util.HashMap;
import java.util.Map;

public class IcebergExample {
    public static void main(String[] args) throws Exception {
        // Load the table from the same Hadoop catalog used above
        Map<String, String> properties = new HashMap<>();
        properties.put(CatalogProperties.WAREHOUSE_LOCATION, "hdfs://localhost:9000/warehouse");
        HadoopCatalog catalog = new HadoopCatalog();
        catalog.setConf(new Configuration());
        catalog.initialize("hadoop", properties);
        Table table = catalog.loadTable(TableIdentifier.of("mydb", "mytable"));

        // Project name and age, filtering rows where age >= 30
        try (CloseableIterable<Record> records = IcebergGenerics.read(table)
                .select("name", "age")
                .where(Expressions.greaterThanOrEqual("age", 30))
                .build()) {
            for (Record record : records) {
                System.out.println(record.getField("name") + ", " + record.getField("age"));
            }
        }
    }
}