#define ULIM(x,y,z) ((y) < (z) ? LIM(x,y,z) : LIM(x,z,y))
#define CLIP(x) LIM((int)(x),0,65535)
#define SWAP(a,b) { a=a+b; b=a-b; a=a-b; }
+#define ZERO(var) memset(&var, 0, sizeof var)
/*
In order to inline this calculation, I make the risky
assumption that all filter patterns can be described
void CLASS reset()
{
-// uninitialized
-#define ZERO(var) memset(&var, 0, sizeof var);
- aperture = 0;
- ZERO(artist);
- black = 0;
- ZERO(cam_mul)
- canon_ev = 0;
- ZERO(cblack);
- ZERO(cmatrix);
- ZERO(cdesc);
- colors = 0;
- ZERO(cr2_slice);
- ZERO(curve);
- data_error = 0;
- data_offset = 0;
- dng_version = 0;
- exif_cfa = 0;
- ZERO(failure);
- filters = 0;
- ZERO(first_decode);
- flash_used = 0;
- flip = 0;
- focal_len = 0;
- fuji_layout = 0;
- fuji_width = 0;
- ZERO(gpsdata);
- height = 0;
- ZERO(histogram);
- ifname = 0;
- ifp = 0;
- iheight = 0;
- is_foveon = 0;
- iso_speed = 0;
- is_raw = 0;
- iwidth = 0;
- kodak_cbpp = 0;
- left_margin = 0;
- load_flags = 0;
- load_raw = 0;
- maximum = 0;
- meta_data = 0;
- meta_length = 0;
- meta_offset = 0;
- mix_green = 0;
- ZERO(model);
- ZERO(model2);
- ofp = 0;
- oprof = 0;
- order = 0;
- ZERO(ph1);
- pixel_aspect = 0;
- memset(pre_mul, 0, sizeof pre_mul);
- profile_length = 0;
- profile_offset = 0;
- raw_color = 0;
- raw_height = 0;
- raw_width = 0;
- raw_image = 0;
- memset(rgb_cam, 0, sizeof rgb_cam);
- shot_order = 0;
- shrink = 0;
- shutter = 0;
- strip_offset = 0;
- thumb_height = 0;
- thumb_length = 0;
- thumb_load_raw = 0;
- thumb_misc = 0;
- thumb_offset = 0;
- thumb_width = 0;
- tiff_bps = 0;
- tiff_compress = 0;
- tiff_flip = 0;
- tiff_nifds = 0;
- tiff_samples = 0;
- tile_length = 0;
- tile_width = 0;
- timestamp = 0;
- top_margin = 0;
- unique_id = 0;
- ZERO(white);
- width = 0;
- zero_after_ff = 0;
- zero_is_bad = 0;
-
-// initializer data
- shot_select = 0;
- multi_out = 0;
+// zero data segment
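+// (DCRaw_data gathers every member the old reset() cleared one at a time,
+//  so a single memset over the base object replaces the list removed above)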
+ DCRaw_data *data = (DCRaw_data *)this;
+ memset(data, 0, sizeof(*data));
+// non-zero init data
aber[0] = aber[1] = aber[2] = aber[3] = 1;
- gamm[0] = 0.45; gamm[1] = 4.5; gamm[2] = 0;
- gamm[3] = 0; gamm[4] = 0; gamm[5] = 0;
+ gamm[0] = 0.45; gamm[1] = 4.5;
bright = 1;
- ZERO(user_mul);
- threshold = 0;
- half_size = 0;
- four_color_rgb = 0;
- document_mode = 0;
- highlight = 0;
- verbose = 0;
- use_auto_wb = 0;
- use_camera_wb = 0;
use_camera_matrix = 1;
output_color = 1;
output_bps = 8;
- output_tiff = 0;
- med_passes = 0;
- no_auto_bright = 0;
- greybox[0] = 0; greybox[1] = 0;
greybox[2] = UINT_MAX; greybox[3] = UINT_MAX;
-// local static
- gbh_bitbuf = 0; gbh_vbits = 0; gbh_reset = 0;
- ph1_bitbuf = 0; ph1_vbits = 0;
- ZERO(ljpeg_cs);
- ZERO(sony_pad); sony_p = 0;
- ZERO(fov_huff);
- ZERO(clb_cbrt); ZERO(clb_xyz_cam);
- ZERO(pana_buf); pana_vbits = 0;
}
#if 0
#include <stdint.h>
#include <time.h>
+class DCRaw_data;
+class DCRaw;
+
#define CLASS DCRaw::
#if !defined(uchar)
#define uchar unsigned char
#endif
#if !defined(ushort)
#define ushort unsigned short
#endif
-class DCRaw {
+struct jhead;
+struct tiff_tag;
+struct tiff_hdr;
+
+class DCRaw_data {
+// ZEROd by DCRaw reset
+public:
+ FILE *ifp, *ofp;
+ short order;
+ const char *ifname;
+ char *meta_data, xtrans[6][6], xtrans_abs[6][6];
+ char cdesc[5], desc[512], make[64], model[64], model2[64], artist[64];
+ float flash_used, canon_ev, iso_speed, shutter, aperture, focal_len;
+ time_t timestamp;
+ off_t strip_offset, data_offset;
+ off_t thumb_offset, meta_offset, profile_offset;
+ unsigned shot_order, kodak_cbpp, exif_cfa, unique_id;
+ unsigned thumb_length, meta_length, profile_length;
+ unsigned thumb_misc, *oprof, fuji_layout;
+ unsigned tiff_nifds, tiff_samples, tiff_bps, tiff_compress;
+ unsigned black, maximum, mix_green, raw_color, zero_is_bad;
+ unsigned zero_after_ff, is_raw, dng_version, is_foveon, data_error;
+ unsigned tile_width, tile_length, gpsdata[32], load_flags;
+ unsigned flip, tiff_flip, filters, colors;
+ ushort raw_height, raw_width, height, width, top_margin, left_margin;
+ ushort shrink, iheight, iwidth, fuji_width, thumb_width, thumb_height;
+ ushort *raw_image, (*image)[4], cblack[4102];
+ ushort white[8][8], curve[0x10000], cr2_slice[3], sraw_mul[4];
+
+ unsigned shot_select, multi_out;
+ double pixel_aspect, aber[4], gamm[6];
+ float bright, user_mul[4], threshold;
+ int mask[8][4];
+ int half_size, four_color_rgb, document_mode, highlight;
+ int verbose, use_auto_wb, use_camera_wb, use_camera_matrix;
+ int output_color, output_bps, output_tiff, med_passes;
+ int no_auto_bright;
+ unsigned greybox[4];
+ float cam_mul[4], pre_mul[4], cmatrix[3][4], rgb_cam[3][4];
+ int histogram[4][0x2000];
+ void (CLASS *write_thumb)(), (CLASS *write_fun)();
+ void (CLASS *load_raw)(), (CLASS *thumb_load_raw)();
+ jmp_buf failure;
+
+ struct decode {
+ struct decode *branch[2];
+ int leaf;
+ } first_decode[2048], /* *second_decode, CINELERRA */ *free_decode;
+
+ struct tiff_ifd {
+ int width, height, bps, comp, phint, offset, flip, samples, bytes;
+ int tile_width, tile_length;
+ float shutter;
+ } tiff_ifd[10];
+
+ struct ph1 {
+ int format, key_off, tag_21a;
+ int black, split_col, black_col, split_row, black_row;
+ float tag_210;
+ } ph1;
+
+// local static data
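+// (former function-local statics from the bit readers and decode tables,
+//  hoisted into the data block so reset() clears them between files)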
+ unsigned gbh_bitbuf;
+ int gbh_vbits, gbh_reset;
+ uint64_t ph1_bitbuf;
+ int ph1_vbits;
+ float ljpeg_cs[106];
+ unsigned sony_pad[128], sony_p;
+ unsigned fov_huff[1024];
+ float clb_cbrt[0x10000], clb_xyz_cam[3][4];
+ uchar pana_buf[0x4000]; int pana_vbits;
+};
+
+class DCRaw : public DCRaw_data {
+private:
int fcol(int row,int col);
#if 0
char *my_memmem(char *haystack,size_t haystacklen,char *needle,size_t needlelen);
void write_ppm_tiff(void);
void write_cinelerra(void);
void reset();
-
- FILE *ifp, *ofp;
- short order;
- const char *ifname;
- char *meta_data, xtrans[6][6], xtrans_abs[6][6];
- char cdesc[5], desc[512], make[64], model[64], model2[64], artist[64];
- float flash_used, canon_ev, iso_speed, shutter, aperture, focal_len;
- time_t timestamp;
- off_t strip_offset, data_offset;
- off_t thumb_offset, meta_offset, profile_offset;
- unsigned shot_order, kodak_cbpp, exif_cfa, unique_id;
- unsigned thumb_length, meta_length, profile_length;
- unsigned thumb_misc, *oprof, fuji_layout;
- unsigned tiff_nifds, tiff_samples, tiff_bps, tiff_compress;
- unsigned black, maximum, mix_green, raw_color, zero_is_bad;
- unsigned zero_after_ff, is_raw, dng_version, is_foveon, data_error;
- unsigned tile_width, tile_length, gpsdata[32], load_flags;
- unsigned flip, tiff_flip, filters, colors;
- ushort raw_height, raw_width, height, width, top_margin, left_margin;
- ushort shrink, iheight, iwidth, fuji_width, thumb_width, thumb_height;
- ushort *raw_image, (*image)[4], cblack[4102];
- ushort white[8][8], curve[0x10000], cr2_slice[3], sraw_mul[4];
-
- unsigned shot_select, multi_out;
- double pixel_aspect, aber[4], gamm[6];
- float bright, user_mul[4], threshold;
- int mask[8][4];
- int half_size, four_color_rgb, document_mode, highlight;
- int verbose, use_auto_wb, use_camera_wb, use_camera_matrix;
- int output_color, output_bps, output_tiff, med_passes;
- int no_auto_bright;
- unsigned greybox[4];
- float cam_mul[4], pre_mul[4], cmatrix[3][4], rgb_cam[3][4];
+//const data
const double xyz_rgb[3][3] = { /* XYZ from RGB */
{ 0.412453, 0.357580, 0.180423 },
{ 0.212671, 0.715160, 0.072169 },
{ 0.019334, 0.119193, 0.950227 } };
const float d65_white[3] = { 0.950456, 1, 1.088754 };
- int histogram[4][0x2000];
- void (CLASS *write_thumb)(), (CLASS *write_fun)();
- void (CLASS *load_raw)(), (CLASS *thumb_load_raw)();
- jmp_buf failure;
-
- struct decode {
- struct decode *branch[2];
- int leaf;
- } first_decode[2048], /* *second_decode, CINELERRA */ *free_decode;
-
- struct tiff_ifd {
- int width, height, bps, comp, phint, offset, flip, samples, bytes;
- int tile_width, tile_length;
- float shutter;
- } tiff_ifd[10];
-
- struct ph1 {
- int format, key_off, tag_21a;
- int black, split_col, black_col, split_row, black_row;
- float tag_210;
- } ph1;
-
-// local static data
- unsigned gbh_bitbuf;
- int gbh_vbits, gbh_reset;
- uint64_t ph1_bitbuf;
- int ph1_vbits;
- float ljpeg_cs[106];
- unsigned sony_pad[128], sony_p;
- unsigned fov_huff[1024];
- float clb_cbrt[0x10000], clb_xyz_cam[3][4];
- uchar pana_buf[0x4000]; int pana_vbits;
public:
DCRaw();
~DCRaw();
-
+// CINELERRA
char info[1024];
float **data;
int alpha;
int main(int argc, const char **argv);
};
-struct jhead;
-struct tiff_tag;
-struct tiff_hdr;
-
mbsz = 0;
length = 0;
resample_context = 0;
+ swr_ichs = swr_ifmt = swr_irate = 0;
aud_bfr_sz = 0;
aud_bfr = 0;
delete [] bfr;
}
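+// (re)build the libswresample context whenever the decoded frame's channel
+// count, sample format, or rate changes; nothing is allocated when the input
+// already matches the float output layout.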
+void FFAudioStream::init_swr(int ichs, int ifmt, int irate)
+{
+ if( resample_context ) {
+ if( swr_ichs == ichs && swr_ifmt == ifmt && swr_irate == irate )
+ return;
+ swr_free(&resample_context);
+ }
+ swr_ichs = ichs; swr_ifmt = ifmt; swr_irate = irate;
+ if( ichs == channels && ifmt == AV_SAMPLE_FMT_FLT && irate == sample_rate )
+ return;
+ uint64_t ilayout = av_get_default_channel_layout(ichs);
+ if( !ilayout ) ilayout = ((uint64_t)1<<ichs) - 1;
+ uint64_t olayout = av_get_default_channel_layout(channels);
+ if( !olayout ) olayout = ((uint64_t)1<<channels) - 1;
+ resample_context = swr_alloc_set_opts(NULL,
+ olayout, AV_SAMPLE_FMT_FLT, sample_rate,
+ ilayout, (AVSampleFormat)ifmt, irate,
+ 0, NULL);
+ if( resample_context )
+ swr_init(resample_context);
+}
+
int FFAudioStream::get_samples(float *&samples, uint8_t **data, int len)
{
samples = *(float **)data;
for( int i=0; ret>=0 && !flushed && curr_pos<end_pos && i<MAX_RETRY; ++i ) {
ret = read_frame(frame);
if( ret > 0 ) {
+ init_swr(frame->channels, frame->format, frame->sample_rate);
load_history(&frame->extended_data[0], frame->nb_samples);
curr_pos += frame->nb_samples;
}
int FFAudioStream::audio_seek(int64_t pos)
{
- if( decode_activate() < 0 ) return -1;
+ if( decode_activate() <= 0 ) return -1;
if( !st->codecpar ) return -1;
if( in_history(pos) ) return 0;
if( pos == curr_pos ) return 0;
int FFVideoStream::video_seek(int64_t pos)
{
- if( decode_activate() < 0 ) return -1;
+ if( decode_activate() <= 0 ) return -1;
if( !st->codecpar ) return -1;
if( pos == curr_pos-1 && !seeked ) return 0;
// if close enough, just read up to current
aud->sample_rate = avpar->sample_rate;
double secs = to_secs(st->duration, st->time_base);
aud->length = secs * aud->sample_rate;
- if( avpar->format != AV_SAMPLE_FMT_FLT ) {
- uint64_t layout = av_get_default_channel_layout(avpar->channels);
- if( !layout ) layout = ((uint64_t)1<<aud->channels) - 1;
- AVSampleFormat sample_format = (AVSampleFormat)avpar->format;
- aud->resample_context = swr_alloc_set_opts(NULL,
- layout, AV_SAMPLE_FMT_FLT, avpar->sample_rate,
- layout, sample_format, avpar->sample_rate,
- 0, NULL);
- swr_init(aud->resample_context);
- }
+ aud->init_swr(aud->channels, avpar->format, aud->sample_rate);
aud->nudge = st->start_time;
aud->reading = -1;
if( opt_audio_filter )
ret = avcodec_open2(avctx, decoder, &copts);
}
av_dict_free(&copts);
- if( ret < 0 ) {
- fprintf(stderr,"FFMPEG::scan: ");
- fprintf(stderr,_("codec open failed\n"));
- continue;
- }
- AVCodecParameters *avpar = st->codecpar;
- switch( avpar->codec_type ) {
- case AVMEDIA_TYPE_VIDEO: {
- int vidx = ffvideo.size();
- while( --vidx>=0 && ffvideo[vidx]->fidx != i );
- if( vidx < 0 ) break;
- ffvideo[vidx]->avctx = avctx;
- break; }
- case AVMEDIA_TYPE_AUDIO: {
- int aidx = ffaudio.size();
- while( --aidx>=0 && ffaudio[aidx]->fidx != i );
- if( aidx < 0 ) continue;
- ffaudio[aidx]->avctx = avctx;
- break; }
- default: break;
+ if( ret >= 0 ) {
+ AVCodecParameters *avpar = st->codecpar;
+ switch( avpar->codec_type ) {
+ case AVMEDIA_TYPE_VIDEO: {
+ int vidx = ffvideo.size();
+ while( --vidx>=0 && ffvideo[vidx]->fidx != i );
+ if( vidx < 0 ) break;
+ ffvideo[vidx]->avctx = avctx;
+ continue; }
+ case AVMEDIA_TYPE_AUDIO: {
+ int aidx = ffaudio.size();
+ while( --aidx>=0 && ffaudio[aidx]->fidx != i );
+ if( aidx < 0 ) break;
+ ffaudio[aidx]->avctx = avctx;
+ continue; }
+ default: break;
+ }
}
+ fprintf(stderr,"FFMPEG::scan: ");
+ fprintf(stderr,_("codec open failed\n"));
+ avcodec_free_context(&avctx);
}
decode_activate();
while( --vidx>=0 && ffvideo[vidx]->fidx != i );
if( vidx < 0 ) break;
FFVideoStream *vid = ffvideo[vidx];
+ if( !vid->avctx ) break;
int64_t tstmp = pkt.dts;
if( tstmp == AV_NOPTS_VALUE ) tstmp = pkt.pts;
if( tstmp != AV_NOPTS_VALUE && (pkt.flags & AV_PKT_FLAG_KEY) && pkt.pos > 0 ) {
while( --aidx>=0 && ffaudio[aidx]->fidx != i );
if( aidx < 0 ) break;
FFAudioStream *aud = ffaudio[aidx];
+ if( !aud->avctx ) break;
int64_t tstmp = pkt.pts;
if( tstmp == AV_NOPTS_VALUE ) tstmp = pkt.dts;
if( tstmp != AV_NOPTS_VALUE && (pkt.flags & AV_PKT_FLAG_KEY) && pkt.pos > 0 ) {
index_state->pad_data(ch, nch, aud->curr_pos);
}
while( (ret=aud->decode_frame(frame)) > 0 ) {
- if( frame->channels != nch ) break;
+ //if( frame->channels != nch ) break;
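+			// adapt the resampler to each frame instead of bailing out on a channel mismatch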
+ aud->init_swr(frame->channels, frame->format, frame->sample_rate);
float *samples;
int len = aud->get_samples(samples,
&frame->extended_data[0], frame->nb_samples);
virtual ~FFAudioStream();
int is_audio() { return 1; }
int is_video() { return 0; }
+ void init_swr(int ichs, int ifmt, int irate);
int get_samples(float *&samples, uint8_t **data, int len);
int load_history(uint8_t **data, int len);
int decode_frame(AVFrame *frame);
int64_t length;
SwrContext *resample_context;
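+	// input channels/format/rate the current resample_context was built for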
+ int swr_ichs, swr_ifmt, swr_irate;
int aud_bfr_sz;
float *aud_bfr;
};
int FileJPEG::check_sig(Asset *asset)
{
- FILE *stream = fopen(asset->path, "rb");
-
- if(stream)
- {
- char test[10];
- (void)fread(test, 10, 1, stream);
- fclose(stream);
-
- if(test[6] == 'J' && test[7] == 'F' && test[8] == 'I' && test[9] == 'F')
- {
- return 1;
+	FILE *fp = fopen(asset->path, "rb");
+ if( !fp ) return 0;
+ char test[10];
+ int result = -1;
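+	// result: -1 = signature not recognized, 0 = acceptable jpeg, >0 = rejected header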
+ if( fread(test, 1, sizeof(test), fp) == sizeof(test) ) {
+ if( test[6] == 'J' && test[7] == 'F' && test[8] == 'I' && test[9] == 'F' ) {
+ fseek(fp, 0, SEEK_SET);
+ int w = 0, h = 0;
+ result = read_header(fp, w, h);
}
- else
- if(test[0] == 'J' && test[1] == 'P' && test[2] == 'E' && test[3] == 'G' &&
- test[4] == 'L' && test[5] == 'I' && test[6] == 'S' && test[7] == 'T')
- {
- return 1;
+ else if(test[0] == 'J' && test[1] == 'P' && test[2] == 'E' && test[3] == 'G' &&
+ test[4] == 'L' && test[5] == 'I' && test[6] == 'S' && test[7] == 'T') {
+ result = 0;
}
}
+ fclose(fp);
- if(strlen(asset->path) > 4)
- {
- int len = strlen(asset->path);
- if(!strncasecmp(asset->path + len - 4, ".jpg", 4)) return 1;
+ if( result < 0 ) {
+ int i = strlen(asset->path) - 4;
+ if( i >= 0 && !strcasecmp(asset->path+i, ".jpg") )
+ result = 0;
}
- return 0;
+ return !result ? 1 : 0;
}
-
void FileJPEG::get_parameters(BC_WindowBase *parent_window,
Asset *asset,
BC_WindowBase* &format_window,
int FileJPEG::read_frame_header(char *path)
{
- int result = 0;
-
-
- FILE *stream;
-
- if(!(stream = fopen(path, "rb")))
- {
+ FILE *fp = fopen(path, "rb");
+ if( !fp ) {
eprintf("FileJPEG::read_frame_header %s: %m\n", path);
return 1;
}
-
-
+ int w = 0, h = 0, result = 1;
unsigned char test[2];
- (void)fread(test, 2, 1, stream);
- if(test[0] != 0xff || test[1] != 0xd8)
- {
- eprintf("FileJPEG::read_frame_header %s bad header %02x%02x\n",
- path, test[0], test[1]);
- fclose(stream);
- return 1;
+ if( fread(test, 1, sizeof(test), fp) == sizeof(test) &&
+ test[0] == 0xff && test[1] == 0xd8 ) {
+ fseek(fp, 0, SEEK_SET);
+ result = read_header(fp, w, h);
}
- fseek(stream, 0, SEEK_SET);
+ fclose(fp);
+ if( !result ) {
+ asset->width = w; asset->height = h;
+ asset->interlace_mode = ILACE_MODE_NOTINTERLACED;
+ }
+ else
+ eprintf("FileJPEG::read_frame_header %s bad header\n", path);
+ return result;
+}
- struct jpeg_decompress_struct jpeg_decompress;
+int FileJPEG::read_header(FILE *fp, int &w, int &h)
+{
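+// probe the JPEG header without decoding: accept only YCbCr with at most
+// 2x2 chroma subsampling, report the frame size, and return 0 on success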
+ int result = 0;
struct jpeg_error_mgr jpeg_error;
-
+ struct jpeg_decompress_struct jpeg_decompress;
jpeg_decompress.err = jpeg_std_error(&jpeg_error);
jpeg_create_decompress(&jpeg_decompress);
-
- jpeg_stdio_src(&jpeg_decompress, stream);
- jpeg_read_header(&jpeg_decompress, TRUE);
-
- asset->width = jpeg_decompress.image_width;
- asset->height = jpeg_decompress.image_height;
-
- asset->interlace_mode = ILACE_MODE_NOTINTERLACED;
-
+ jpeg_stdio_src(&jpeg_decompress, fp);
+ if( jpeg_read_header(&jpeg_decompress, TRUE) != JPEG_HEADER_OK ) result = 1;
+ if( !result && jpeg_decompress.jpeg_color_space != JCS_YCbCr ) result = 1;
+ if( !result && jpeg_decompress.comp_info[0].h_samp_factor > 2 ) result = 1;
+ if( !result && jpeg_decompress.comp_info[0].v_samp_factor > 2 ) result = 1;
+ if( !result ) {
+ w = jpeg_decompress.image_width;
+ h = jpeg_decompress.image_height;
+ }
jpeg_destroy((j_common_ptr)&jpeg_decompress);
- fclose(stream);
-
return result;
}
-
int FileJPEG::read_frame(VFrame *output, VFrame *input)
{
if(input->get_compressed_size() < 2 ||
int write_frame(VFrame *frame, VFrame *data, FrameWriterUnit *unit);
int can_copy_from(Asset *asset, int64_t position);
int read_frame_header(char *path);
+ static int read_header(FILE *fp, int &w, int &h);
FrameWriterUnit* new_writer_unit(FrameWriter *writer);
void *decompressor;
case SF_FORMAT_AIFF: asset->format = FILE_AIFF; break;
case SF_FORMAT_AU: asset->format = FILE_AU; break;
case SF_FORMAT_RAW: asset->format = FILE_PCM; break;
+ default:
case SF_FORMAT_PAF: asset->format = FILE_SND; break;
case SF_FORMAT_SVX: asset->format = FILE_SND; break;
case SF_FORMAT_NIST: asset->format = FILE_SND; break;
int best_colormodel = get_best_colormodel(colormodel);
// Only create OpenGL Pbuffer and texture.
- if(device->out_config->driver == PLAYBACK_X11_GL)
- {
+ if( device->out_config->driver == PLAYBACK_X11_GL ) {
// Create bitmap for initial load into texture.
// Not necessary to do through Playback3D.....yet
- if(!output_frame)
- {
+ if( !output_frame ) {
output_frame = new VFrame(0, -1,
- device->out_w, device->out_h,
- colormodel, -1);
+ device->out_w, device->out_h, colormodel, -1);
//BUFFER2(output_frame->get_rows()[0], "VDeviceX11::new_output_buffer 1");
}
window_id = output->get_canvas()->get_id();
output_frame->set_opengl_state(VFrame::RAM);
}
- else
- {
+ else {
// Conform existing bitmap to new colormodel and output size
- if(bitmap)
- {
+ if( bitmap ) {
// Restart if output size changed or output colormodel changed.
// May have to recreate if transferring between windowed and fullscreen.
- if(!color_model_selected ||
- (!bitmap->hardware_scaling() &&
- (bitmap->get_w() != output->get_canvas()->get_w() ||
- bitmap->get_h() != output->get_canvas()->get_h())) ||
- colormodel != output_frame->get_color_model())
- {
- int size_change = (bitmap->get_w() != output->get_canvas()->get_w() ||
- bitmap->get_h() != output->get_canvas()->get_h());
+ if( !color_model_selected || ( !bitmap->hardware_scaling() &&
+ (bitmap->get_w() != output->get_canvas()->get_w() ||
+ bitmap->get_h() != output->get_canvas()->get_h()) ) ||
+ colormodel != output_frame->get_color_model() ) {
+ int size_change =
+ bitmap->get_w() != output->get_canvas()->get_w() ||
+ bitmap->get_h() != output->get_canvas()->get_h();
//printf("VDeviceX11::new_output_buffer %d\n", __LINE__);
- delete bitmap;
- delete output_frame;
- bitmap = 0;
- output_frame = 0;
+ delete bitmap; bitmap = 0;
+ delete output_frame; output_frame = 0;
// Blank only if size changed
- if(size_change)
- {
+ if( size_change ) {
output->get_canvas()->set_color(BLACK);
output->get_canvas()->draw_box(0, 0, output->w, output->h);
output->get_canvas()->flash();
}
}
- else
-// Update the ring buffer
- if(bitmap_type == BITMAP_PRIMARY)
- {
-//printf("VDeviceX11::new_output_buffer %d\n", __LINE__);
- output_frame->set_memory(bitmap);
- }
}
// Create new bitmap
- if(!bitmap)
- {
+ if( !bitmap ) {
// Try hardware accelerated
- switch(best_colormodel)
- {
- case BC_YUV420P:
- if(device->out_config->driver == PLAYBACK_X11_XV &&
- output->get_canvas()->accel_available(best_colormodel, 0) &&
- !output->use_scrollbars)
- {
- bitmap = new BC_Bitmap(output->get_canvas(),
- device->out_w, device->out_h,
- best_colormodel, 1);
- output_frame = new VFrame(bitmap,
- device->out_w, device->out_h,
- best_colormodel, -1);
- bitmap_type = BITMAP_PRIMARY;
- }
- break;
-
- case BC_YUV422P:
- if(device->out_config->driver == PLAYBACK_X11_XV &&
- output->get_canvas()->accel_available(best_colormodel, 0) &&
- !output->use_scrollbars)
- {
- bitmap = new BC_Bitmap(output->get_canvas(),
- device->out_w, device->out_h,
- best_colormodel, 1);
- output_frame = new VFrame(bitmap,
- device->out_w, device->out_h,
- best_colormodel, -1);
- bitmap_type = BITMAP_PRIMARY;
- }
- else
- if(device->out_config->driver == PLAYBACK_X11_XV &&
- output->get_canvas()->accel_available(BC_YUV422, 0))
- {
- bitmap = new BC_Bitmap(output->get_canvas(),
- device->out_w,
- device->out_h,
- BC_YUV422,
- 1);
- bitmap_type = BITMAP_TEMP;
- }
- break;
-
- case BC_YUV422:
- if(device->out_config->driver == PLAYBACK_X11_XV &&
- output->get_canvas()->accel_available(best_colormodel, 0) &&
- !output->use_scrollbars)
- {
- bitmap = new BC_Bitmap(output->get_canvas(),
- device->out_w, device->out_h,
- best_colormodel, 1);
- output_frame = new VFrame(bitmap,
- device->out_w, device->out_h,
- best_colormodel, -1);
- bitmap_type = BITMAP_PRIMARY;
- }
- else
- if(device->out_config->driver == PLAYBACK_X11_XV &&
- output->get_canvas()->accel_available(BC_YUV422P, 0))
- {
- bitmap = new BC_Bitmap(output->get_canvas(),
- device->out_w, device->out_h,
- BC_YUV422P, 1);
- bitmap_type = BITMAP_TEMP;
- }
- break;
- }
+ bitmap_type = BITMAP_TEMP;
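+// BITMAP_PRIMARY: the XV bitmap backs output_frame directly;
+// BITMAP_TEMP: decode to an intermediate frame and transfer it into the
+// bitmap when the buffer is written out.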
+ switch( best_colormodel ) {
+ case BC_YUV420P:
+ if( device->out_config->driver == PLAYBACK_X11_XV &&
+ output->get_canvas()->accel_available(best_colormodel, 0) &&
+ !output->use_scrollbars )
+ bitmap_type = BITMAP_PRIMARY;
+ break;
+
+ case BC_YUV422P:
+ if( device->out_config->driver == PLAYBACK_X11_XV &&
+ output->get_canvas()->accel_available(best_colormodel, 0) &&
+ !output->use_scrollbars )
+ bitmap_type = BITMAP_PRIMARY;
+ else if( device->out_config->driver == PLAYBACK_X11_XV &&
+ output->get_canvas()->accel_available(BC_YUV422, 0) ) {
+ bitmap = new BC_Bitmap(output->get_canvas(),
+ device->out_w, device->out_h, BC_YUV422, 1);
+ }
+ break;
+ case BC_YUV422:
+ if( device->out_config->driver == PLAYBACK_X11_XV &&
+ output->get_canvas()->accel_available(best_colormodel, 0) &&
+ !output->use_scrollbars )
+ bitmap_type = BITMAP_PRIMARY;
+ else if( device->out_config->driver == PLAYBACK_X11_XV &&
+ output->get_canvas()->accel_available(BC_YUV422P, 0) ) {
+ bitmap = new BC_Bitmap(output->get_canvas(),
+ device->out_w, device->out_h, BC_YUV422P, 1);
+ }
+ break;
+ }
+ if( bitmap_type == BITMAP_PRIMARY ) {
+ bitmap = new BC_Bitmap(output->get_canvas(),
+ device->out_w, device->out_h, best_colormodel, 1);
+ output_frame = new VFrame(bitmap,
+ device->out_w, device->out_h, best_colormodel, -1);
+ }
+ else {
// Try default colormodel
-			if(!bitmap)
-			{
+				if( !bitmap ) {
best_colormodel = output->get_canvas()->get_color_model();
bitmap = new BC_Bitmap(output->get_canvas(),
- output->get_canvas()->get_w(),
- output->get_canvas()->get_h(),
+ output->get_canvas()->get_w(), output->get_canvas()->get_h(),
best_colormodel, 1);
-				bitmap_type = BITMAP_TEMP;
-			}
-
-			if(bitmap_type == BITMAP_TEMP)
-			{
+				}
// Intermediate frame
output_frame = new VFrame(0, -1,
- device->out_w, device->out_h,
- colormodel, -1);
-//BUFFER2(output_frame->get_rows()[0], "VDeviceX11::new_output_buffer 2");
- bitmap_type = BITMAP_TEMP;
+ device->out_w, device->out_h, colormodel, -1);
}
+
color_model_selected = 1;
}
+ else if( bitmap_type == BITMAP_PRIMARY ) {
+// Update the ring buffer
+ output_frame->set_memory(bitmap);
+ }
}
*result = output_frame;
{
// The reason for not drawing single frame is that it is _always_ drawn
// when drawing draw_refresh in cwindowgui and vwindowgui
- if (device->single_frame)
+ if( device->single_frame )
return 0;
output->lock_canvas("VDeviceX11::write_buffer");
}
else
#endif
- if(bitmap_type == BITMAP_TEMP) {
+ if( bitmap_type == BITMAP_TEMP ) {
// printf("VDeviceX11::write_buffer 1 %d %d, %d %d %d %d -> %d %d %d %d\n",
// output->w, output->h, in_x, in_y, in_w, in_h, out_x, out_y, out_w, out_h );
// fflush(stdout);
//printf("VDeviceX11::write_buffer %d output_channels=%p\n", __LINE__, output_channels);
-
-
- if(bitmap->hardware_scaling())
- {
- BC_CModels::transfer(bitmap->get_row_pointers(),
- output_channels->get_rows(), 0, 0, 0,
- output_channels->get_y(),
- output_channels->get_u(),
- output_channels->get_v(),
- 0, 0,
- output_channels->get_w(),
- output_channels->get_h(),
- 0, 0,
- bitmap->get_w(),
- bitmap->get_h(),
- output_channels->get_color_model(),
- bitmap->get_color_model(),
- 0,
- output_channels->get_w(),
- bitmap->get_w());
+ if( bitmap->hardware_scaling() ) {
+ BC_CModels::transfer(bitmap->get_row_pointers(), output_channels->get_rows(),
+ 0, 0, 0, output_channels->get_y(), output_channels->get_u(), output_channels->get_v(),
+ 0, 0, output_channels->get_w(), output_channels->get_h(),
+ 0, 0, bitmap->get_w(), bitmap->get_h(),
+ output_channels->get_color_model(), bitmap->get_color_model(),
+ 0, output_channels->get_w(), bitmap->get_w());
}
- else
- {
- BC_CModels::transfer(bitmap->get_row_pointers(),
- output_channels->get_rows(), 0, 0, 0,
- output_channels->get_y(),
- output_channels->get_u(),
- output_channels->get_v(),
- (int)output_x1,
- (int)output_y1,
- (int)(output_x2 - output_x1),
- (int)(output_y2 - output_y1),
- 0, 0,
- (int)(canvas_x2 - canvas_x1),
- (int)(canvas_y2 - canvas_y1),
- output_channels->get_color_model(),
- bitmap->get_color_model(),
- 0,
- output_channels->get_w(),
- bitmap->get_w());
+ else {
+ BC_CModels::transfer(bitmap->get_row_pointers(), output_channels->get_rows(),
+ 0, 0, 0, output_channels->get_y(), output_channels->get_u(), output_channels->get_v(),
+ (int)output_x1, (int)output_y1, (int)(output_x2 - output_x1), (int)(output_y2 - output_y1),
+ 0, 0, (int)(canvas_x2 - canvas_x1), (int)(canvas_y2 - canvas_y1),
+ output_channels->get_color_model(), bitmap->get_color_model(),
+ 0, output_channels->get_w(), bitmap->get_w());
}
}
// canvas_x1, canvas_y1, canvas_x2, canvas_y2);
// Cause X server to display it
- if(device->out_config->driver == PLAYBACK_X11_GL)
- {
+ if( device->out_config->driver == PLAYBACK_X11_GL ) {
// Output is drawn in close_all if no video.
- if(output->get_canvas()->get_video_on())
- {
+ if( output->get_canvas()->get_video_on() ) {
// Draw output frame directly. Not used for compositing.
output->get_canvas()->unlock_window();
output->unlock_canvas();
output->get_canvas()->lock_window("VDeviceX11::write_buffer 2");
}
}
- else
- if(bitmap->hardware_scaling())
- {
- output->get_canvas()->draw_bitmap(bitmap,
- !device->single_frame,
+ else if( bitmap->hardware_scaling() ) {
+ output->get_canvas()->draw_bitmap(bitmap, !device->single_frame,
(int)canvas_x1, (int)canvas_y1,
- (int)(canvas_x2 - canvas_x1),
- (int)(canvas_y2 - canvas_y1),
+ (int)(canvas_x2 - canvas_x1), (int)(canvas_y2 - canvas_y1),
(int)output_x1, (int)output_y1,
- (int)(output_x2 - output_x1),
- (int)(output_y2 - output_y1),
- 0);
+ (int)(output_x2 - output_x1), (int)(output_y2 - output_y1), 0);
}
- else
- {
-//printf("VDeviceX11::write_buffer %d bitmap=%p\n", __LINE__, bitmap);
- output->get_canvas()->draw_bitmap(bitmap,
- !device->single_frame,
+ else {
+ output->get_canvas()->draw_bitmap(bitmap, !device->single_frame,
(int)canvas_x1, (int)canvas_y1,
- (int)(canvas_x2 - canvas_x1),
- (int)(canvas_y2 - canvas_y1),
- 0, 0,
- (int)(canvas_x2 - canvas_x1),
- (int)(canvas_y2 - canvas_y1),
- 0);
-//printf("VDeviceX11::write_buffer %d bitmap=%p\n", __LINE__, bitmap);
+ (int)(canvas_x2 - canvas_x1), (int)(canvas_y2 - canvas_y1), 0, 0,
+ (int)(canvas_x2 - canvas_x1), (int)(canvas_y2 - canvas_y1), 0);
}
delete [] buffers;
buffers = new_buffers;
buffer_count = count;
+ XFlush(top_level->display);
if( lock_avail ) avail_lock->unlock();
//top_level->unlock_window();
}
int hardware_scaling() {
return xv_portid < 0 ? 0 :
(get_color_model() == BC_YUV420P ||
- get_color_model() == BC_YUV422P ||
+ // get_color_model() == BC_YUV422P || not in bc_to_x
get_color_model() == BC_YUV422) ? 1 : 0;
}
int get_w() { return w; }
return vframe;
}
-
VFrame::VFrame(VFrame &frame)
{
reset_parameters(1);
params = new BC_Hash;
allocate_data(0, -1, 0, 0, 0, frame.w, frame.h,
frame.color_model, frame.bytes_per_line);
- memcpy(data, frame.data, bytes_per_line * h);
- copy_stacks(&frame);
+ copy_from(&frame);
}
VFrame::VFrame(int w, int h, int color_model, long bytes_per_line)
int VFrame::write_png(const char *path)
{
+ VFrame *vframe = this;
png_structp png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, 0, 0, 0);
png_infop info_ptr = png_create_info_struct(png_ptr);
FILE *out_fd = fopen(path, "w");
- if(!out_fd)
- {
+ if(!out_fd) {
printf("VFrame::write_png %d %s %s\n", __LINE__, path, strerror(errno));
return 1;
}
int png_cmodel = PNG_COLOR_TYPE_RGB;
- switch(get_color_model())
- {
- case BC_RGB888:
- case BC_YUV888:
- png_cmodel = PNG_COLOR_TYPE_RGB;
- break;
-
- case BC_RGBA8888:
- case BC_YUVA8888:
+ int bc_cmodel = get_color_model();
+ switch( bc_cmodel ) {
+ case BC_RGB888: break;
+ case BC_RGBA8888: png_cmodel = PNG_COLOR_TYPE_RGB_ALPHA; break;
+ case BC_A8: png_cmodel = PNG_COLOR_TYPE_GRAY; break;
+ default:
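+		// other color models: transfer into a temporary RGB(A) frame libpng can encode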
+		if( BC_CModels::has_alpha(bc_cmodel) ) {
+ bc_cmodel = BC_RGBA8888;
png_cmodel = PNG_COLOR_TYPE_RGB_ALPHA;
- break;
-
- case BC_A8:
- png_cmodel = PNG_COLOR_TYPE_GRAY;
- break;
+		}
+		else
+			bc_cmodel = BC_RGB888;
+ vframe = new VFrame(get_w(), get_h(), bc_cmodel, -1);
+ vframe->transfer_from(this);
+ break;
}
-
png_init_io(png_ptr, out_fd);
png_set_compression_level(png_ptr, 9);
- png_set_IHDR(png_ptr,
- info_ptr,
- get_w(),
- get_h(),
- 8,
- png_cmodel,
- PNG_INTERLACE_NONE,
- PNG_COMPRESSION_TYPE_DEFAULT,
- PNG_FILTER_TYPE_DEFAULT);
+ png_set_IHDR(png_ptr, info_ptr, get_w(), get_h(), 8, png_cmodel,
+ PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_DEFAULT, PNG_FILTER_TYPE_DEFAULT);
png_write_info(png_ptr, info_ptr);
- png_write_image(png_ptr, get_rows());
+ png_write_image(png_ptr, vframe->get_rows());
png_write_end(png_ptr, info_ptr);
png_destroy_write_struct(&png_ptr, &info_ptr);
fclose(out_fd);
+ if( vframe != this ) delete vframe;
return 0;
}