tinyexr is a small, single-file, header-only library to load and save OpenEXR (.exr) images.
tinyexr is written in portable C++ (no dependencies except the STL), so it is easy to embed into your application.
To use tinyexr, simply copy `tinyexr.h` into your project.

NOTE: The API is still subject to change. See the source code for details and the list of currently supported features.
Include `tinyexr.h` with the `TINYEXR_IMPLEMENTATION` macro defined (do this in exactly one .cc file):

```cpp
#define TINYEXR_IMPLEMENTATION
#include "tinyexr.h"
```
Quickly reading an RGB(A) EXR file:

```cpp
const char* input = "asakusa.exr";
float* out; // width * height * RGBA
int width;
int height;
const char* err;

int ret = LoadEXR(&out, &width, &height, input, &err);
if (ret != 0) {
  fprintf(stderr, "Load EXR err: %s\n", err);
} else {
  // ... use `out` ...
  free(out); // the pixel buffer is allocated by tinyexr; release it when done
}
```
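A minimal sketch of addressing a pixel in the returned buffer, assuming the usual row-major, top-to-bottom scanline layout with 4 floats per pixel (`x`, `y` are hypothetical pixel coordinates):

```cpp
// Given integer coordinates x in [0, width) and y in [0, height):
int idx = 4 * (y * width + x);
float r = out[idx + 0];
float g = out[idx + 1];
float b = out[idx + 2];
float a = out[idx + 3];
```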
Loading a multi-channel EXR from a file:

```cpp
const char* input = "asakusa.exr";
const char* err;
EXRImage exrImage;
InitEXRImage(&exrImage);

int ret = ParseMultiChannelEXRHeaderFromFile(&exrImage, input, &err);
if (ret != 0) {
  fprintf(stderr, "Parse EXR err: %s\n", err);
  return;
}

//// Uncomment if you want to read HALF images as FLOAT.
//for (int i = 0; i < exrImage.num_channels; i++) {
//  if (exrImage.pixel_types[i] == TINYEXR_PIXELTYPE_HALF) {
//    exrImage.requested_pixel_types[i] = TINYEXR_PIXELTYPE_FLOAT;
//  }
//}

ret = LoadMultiChannelEXRFromFile(&exrImage, input, &err);
if (ret != 0) {
  fprintf(stderr, "Load EXR err: %s\n", err);
  return;
}
```
Saving an EXR file:

```cpp
#include <cstdio>
#include <cstdlib>
#include <vector>

// Returns true on success.
bool SaveEXR(const float* rgb, int width, int height, const char* outfilename) {
  EXRImage image;
  InitEXRImage(&image);

  image.num_channels = 3;

  // Must be in BGR(A) order, since most EXR viewers expect this channel order.
  const char* channel_names[] = {"B", "G", "R"}; // "B", "G", "R", "A" for an RGBA image

  // Split the interleaved RGB input into per-channel planes.
  std::vector<float> images[3];
  images[0].resize(width * height);
  images[1].resize(width * height);
  images[2].resize(width * height);
  for (int i = 0; i < width * height; i++) {
    images[0][i] = rgb[3*i+0];
    images[1][i] = rgb[3*i+1];
    images[2][i] = rgb[3*i+2];
  }

  float* image_ptr[3];
  image_ptr[0] = &(images[2].at(0)); // B
  image_ptr[1] = &(images[1].at(0)); // G
  image_ptr[2] = &(images[0].at(0)); // R

  image.channel_names = channel_names;
  image.images = (unsigned char**)image_ptr;
  image.width = width;
  image.height = height;
  image.compression = TINYEXR_COMPRESSIONTYPE_ZIP;

  image.pixel_types = (int*)malloc(sizeof(int) * image.num_channels);
  image.requested_pixel_types = (int*)malloc(sizeof(int) * image.num_channels);
  for (int i = 0; i < image.num_channels; i++) {
    image.pixel_types[i] = TINYEXR_PIXELTYPE_FLOAT;          // pixel type of the input image
    image.requested_pixel_types[i] = TINYEXR_PIXELTYPE_HALF; // pixel type to be stored in the .EXR
  }

  const char* err;
  int ret = SaveMultiChannelEXRToFile(&image, outfilename, &err);

  free(image.pixel_types);
  free(image.requested_pixel_types);

  if (ret != 0) {
    fprintf(stderr, "Save EXR err: %s\n", err);
    return false;
  }
  printf("Saved exr file. [ %s ] \n", outfilename);
  return true;
}
```
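A short, hypothetical usage of the helper above; the input buffer holds three interleaved floats per pixel:

```cpp
// Write a 256x256 solid orange image to a hypothetical output file.
int w = 256, h = 256;
std::vector<float> rgb(w * h * 3);
for (int i = 0; i < w * h; i++) {
  rgb[3*i+0] = 1.0f; // R
  rgb[3*i+1] = 0.5f; // G
  rgb[3*i+2] = 0.0f; // B
}
if (!SaveEXR(rgb.data(), w, h, "orange.exr")) {
  // handle the error
}
```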
Reading a deep image EXR file. See examples/deepview for actual usage.

```cpp
const char* input = "deepimage.exr";
const char* err;
DeepImage deepImage;

int ret = LoadDeepEXR(&deepImage, input, &err);

// Access each sample in the deep pixel.
// `depthChan` is the index of the channel you want to read (e.g. depth).
for (int y = 0; y < deepImage.height; y++) {
  int sampleNum = deepImage.offset_table[y][deepImage.width-1];
  for (int x = 0; x < deepImage.width-1; x++) {
    // offset_table gives the half-open sample range [s_start, s_end)
    // for pixel (x, y) within this scanline.
    int s_start = deepImage.offset_table[y][x];
    int s_end   = deepImage.offset_table[y][x+1];
    if (s_start >= sampleNum) {
      continue;
    }
    s_end = (s_end < sampleNum) ? s_end : sampleNum;
    for (int s = s_start; s < s_end; s++) {
      float val = deepImage.image[depthChan][y][s];
      ...
    }
  }
}
```
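The snippet above leaves `depthChan` undefined. A hedged sketch of locating it by channel name, assuming `DeepImage` exposes `num_channels` and `channel_names` fields (check `tinyexr.h` for the exact struct layout):

```cpp
#include <cstring>

int depthChan = -1;
for (int c = 0; c < deepImage.num_channels; c++) {
  if (strcmp(deepImage.channel_names[c], "Z") == 0) { // "Z" commonly holds depth
    depthChan = c;
    break;
  }
}
```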
examples/deepview is a simple deep image viewer in OpenGL.

Contributions are welcome!
3-clause BSD

tinyexr uses miniz, which was developed by Rich Geldreich ([email protected]) and is placed in the public domain.

The tinyexr tools use stb, which is in the public domain: https://github.com/nothings/stb

tinyexr uses some code from OpenEXR, which is licensed under the 3-clause BSD license.

Syoyo Fujita ([email protected])