package imseProc.sensicam;

import imseProc.core.ByteBufferImage;


import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

import otherSupport.bufferControl.DirectBufferControl;


import sensicamJNI.CamTypes;
import sensicamJNI.SenbufDev;
import sensicamJNI.SencamDef;
import sensicamJNI.SensicamException;

import sensicamJNI.Sensicam;
//import sensicamJNI.fake.Sensicam;

/** Thread runner used to capture images.
 *  
 * It contains all the actual JNI calls to the camera library since
 * we init the library and open the device each time we run a capture series
 * 
 * However, only 1 of these should ever be created (for each board)
 * so that it won't try multiple at the same time.
 * 
 */
public class SensicamCapture implements Runnable {
	/** Per-image metadata key for the exposure time (ms) sampled at select time. */
	public final static String EXPOSURE_METADATA_NAME = "Sensicam/seriesExposureMS";
		
	/** Source receiving images/metadata for the active series; null when idle. */
	private SensicamSource activeSource;
	
	/** Human-readable state, reported via getStatus(). */
	private String status = "Not inited";
	/** Next slot in images[] to fill; wraps according to cfg.wrap. */
	private int nextImgIdx;
	
	/** Java-side destination images for the capture series. */
	private ByteBufferImage images[];
	
	/** Private clone of the caller's config, taken in startCapture(). */
	private SensicamConfig cfg; 
	
	/** Worker thread executing run(); one capture series per thread. */
	private Thread thread;
	
	/** Internal state */
	/** Driver handle from sen_initboard(); 0 while the board is closed. */
	private long hDriver = 0;
	/** Driver buffer numbers, one per kernel buffer; -1 marks an unallocated slot. */
	private int kernelBufferNo[];
	/** Bytes per copied image: actWidth * actHeight * (2 if 12-bit else 1) — see initImages(). */
	private int imgDataSize;
	
	/** Camera type code from sen_get_cam_param (see CamTypes). */
	private int cameraType;
	/** Full CCD size reported by sen_getsizes(). */
	private int ccdWidth, ccdHeight;
	/** Actual (ROI/binned) image size reported by sen_getsizes(). */
	private int actWidth, actHeight;
	/** Bits per pixel reported by sen_getsizes(). */
	private int bitDepth;

	/** Set by stopCapture() to ask the worker thread to exit. */
	private boolean death = false;
	/** Set by inCaptureModifyConfig(); serviced at the top of the capture loop. */
	private boolean modifyConfig = false;
	
	/** Enables the very chatty per-poll ("spin") logging in log(). */
	private boolean spinDebug = false;
	
	/* Timing reference points used for the per-image metadata. */
	private long seqStartTimeNS;
	private long seqStartTimeMS;
	private long lastSelectTimeNS;
	private long lastSelectTimeMS;
	/** cfg.exposureMS sampled at the most recent select. */
	private int exposureTimeAtSelect;
	private HashMap<String,Object> metaDataMap;	// NOTE(review): never used in this file — candidate for removal
	
	/** Thread entry point: runs one complete capture series.
	 * 
	 *  Sequence: init camera, load COC, allocate kernel buffers and java
	 *  images, capture loop. The driver is always released again (and the
	 *  config/source references dropped) in the finally block, whatever
	 *  happens.
	 */
	@Override
	public void run() {
		try{
			// A non-zero hDriver means a previous series never released the board.
			if(hDriver != 0){
				throw new RuntimeException("Camera driver already open, aborting both");
			}
			
			setStatus("Capture init");
			initCamera(); 				
			setupCOC();
			initBuffers();
			initImages();
			setStatus("Capturing");
			doCapture();
			setStatus("Capture done");
		}catch(Exception e){
			e.printStackTrace();
			setStatus("ERROR: " + e.getMessage());
		}finally{
			// Always close the board and drop references so a new series can start.
			deinit();
			cfg = null;
			activeSource = null;
		}
	}
	
	
	/** Opens the board, resets the camera and reads/validates the camera type.
	 * 
	 *  Also resets the kernel buffer table (so deinit() can tell which slots
	 *  were actually allocated) and configures library logging per the config.
	 *  Leaves hDriver set on success.
	 * 
	 * @throws SensicamException if the board/camera cannot be opened or the
	 *         camera type is not one this class supports
	 */
	private void initCamera() {
		kernelBufferNo = new int[cfg.nKernelBuffers];
		for(int i=0; i < cfg.nKernelBuffers; i++)
			kernelBufferNo[i] = -1;
		
		/* Setup logging */
		if(cfg.logFile != null){
			Sensicam.sen_enable_message_log(0xffff, cfg.logFile);
			log(false, ".init(): Camera logging to '" + cfg.logFile + "'.");
		}
		if(cfg.enableSyslog)		
			Sensicam.sen_set_syslog_facility(0xffff);
		
		/* Open device */
		long retL[] = Sensicam.sen_initboard(cfg.boardNo); 
		hDriver = retL[1];
		log(false, ".init(): initboard() = " + retL[0] + ", hDriver = " + hDriver);
		// FIX: the initboard return code was previously only logged, never checked,
		// unlike every other JNI call in this class.
		if(retL[0] < 0)
			throw new SensicamException("sen_initboard", (int)retL[0]);

		int ret = Sensicam.sen_setup_camera(hDriver);
		if(ret < 0)			  
			throw new SensicamException("sen_setup_camera", ret);
		System.out.println("sen_setup_camera: " + ret);

		SencamDef.cam_param params = new SencamDef.cam_param();
		
		Sensicam.sen_get_cam_param(hDriver, params);
		this.cameraType = params.cam_typ;
		
		// Log label fixed: this runs in initCamera(), not setupCOC().
		log(false, ".init(): Camera type:" + CamTypes.camTypeToString(cameraType));
		
		// Only the long-exposure style camera families are handled by setupCOC().
		switch(cameraType){
			case CamTypes.LONGEXP:				
			case CamTypes.OEM:
			case CamTypes.FASTEXP:
			case CamTypes.LONGEXPI:
			case CamTypes.LONGEXPQE:
			case CamTypes.FASTEXPQE:
			case CamTypes.TESTCAM:
					break;
			case CamTypes.NOCAM:
				throw new SensicamException("Apparently there's no camera. What?");				
			case CamTypes.DICAM:
				throw new SensicamException("DICAM not supported. Please add config here");
			default:
				throw new SensicamException("Unknown camera '" + params.cam_typ + "'. Please add config here");				
		}
	}
	
	/** Builds the exposure/trigger/ROI/binning/timing setup and loads it into
	 *  the camera with sen_set_coc().
	 * 
	 *  Only the M_LONG exposure family is supported; the timing string format
	 *  depends on the exposure sub mode (see the table by the switch below).
	 * 
	 * @throws SensicamException on unsupported modes or JNI call failure
	 */
	private void setupCOC(){

		//Currently this only supports:
		// "'sensicam long exposure', all 'sensicam qe', the 'sensicam em' and 'sensicam uv' versions of the sensicam camera series."
		// under the 'Long Exposure' settings, which actually support down to 500ns - yea, 'long' then!		
		if(cfg.exposureType != CamTypes.M_LONG)
			throw new SensicamException("Only M_LONG exposure mode supported. Please add config here");
		
		// Pack type / gain / sub mode into the single mode word the library expects.
		int exposureMode = ((cfg.exposureType & 0xFF) | ((cfg.gainMode & 0xFF) << 8) | ((cfg.exposureSubMode & 0xFF) << 16));
		int trig = cfg.triggerMode;
		
		// ROI bounds are in units of 32 pixels and are 1 based... *GROAN*
		if(cfg.roiW == 0 || cfg.roiH == 0){
			// An unset ROI dimension defaults to the full CCD.
			int retI[] = Sensicam.sen_getsizes(hDriver);
			int ret = retI[0];
			// FIX: this return code was read but never checked.
			if(ret < 0)
				throw new SensicamException("sen_getsizes", ret);
			ccdWidth = retI[1]; ccdHeight = retI[2];
			actWidth = retI[3]; actHeight = retI[4];
			bitDepth = retI[5];
			
			if(cfg.roiW == 0) cfg.roiW = ccdWidth;		
			if(cfg.roiH == 0) cfg.roiH = ccdHeight;
		}
		// Convert pixel coordinates to 1-based, 32-pixel block coordinates.
		int roiX0b32 = (int)Math.floor((double)cfg.roiX0 / 32.0) + 1;
		int roiX1b32 = roiX0b32 + (int)Math.ceil((double)cfg.roiW / 32.0) - 1;
		int roiY0b32 = cfg.roiY0 / 32 + 1;
		int roiY1b32 = roiY0b32 + (int)Math.ceil((double)cfg.roiH / 32.0) - 1;
		
		String timeValues;		
		
		//timing string formats:
		// 	CamTypes.LONGEXP / CamTypes.NORMALLONG		"Delay(ms),	Exposure(ms),	-1,-1"
		// 	CamTypes.LONGEXP / CamTypes.VIDEO	 		"0,			Exposure(ms),	-1,-1"
		//	CamTypes.LONGEXP / CamTypes.MECHSHUT 		"Delay(ms),	Exposure(ms),	AV,AV,	START,	STOP,	-1, -1"
		//	CamTypes.LONGEXP / CamTypes.MECHSHUTV 		"0,			Exposure(ms),	AV,AV,	START,	STOP,	-1, -1"
		//	CamTypes.LONGEXP / CamTypes.QE_FAST			"Delay(ns),	Exposure(ns),	-1,-1"	Exposure min = 500(ns), nearest 100ns will be used
		//	CamTypes.LONGEXP / CamTypes.QE_DOUBLE		"-1,-1"	External trigger only
		switch(cfg.exposureSubMode){
			case CamTypes.NORMALLONG: 	timeValues = cfg.delayMS + "," 	+ cfg.exposureMS + ",-1,-1"; break;
			case CamTypes.VIDEO: 		timeValues = "0," 			+ cfg.exposureMS + ",-1,-1"; break;
			case CamTypes.MECHSHUT:				
			case CamTypes.MECHSHUTV: 	
					// BUG(fixed): this used (cfg.shutterEndMS < cfg.shutterEndMS) — a
					// self-comparison that is always false — so the AV,AV field was always
					// "0,0," (the dead "-1,-1" branch also lacked its trailing comma and
					// would have produced a malformed string). The always-taken value is
					// now written directly, preserving behaviour; if auto AV values were
					// intended, the original condition needs revisiting against the docs.
					timeValues = ((cfg.exposureSubMode == CamTypes.MECHSHUTV) ? "0" : cfg.delayMS) + ","
									+ cfg.exposureMS + ","
									+ "0,0,"
									+ cfg.shutterStartMS + "," + cfg.shutterEndMS
									+ ",-1,-1"; 
					break;			
			case CamTypes.QE_FAST: 		timeValues = cfg.delayNS + "," 	+ cfg.exposureNS + ",-1,-1"; break;
			case CamTypes.QE_DOUBLE: 	timeValues = "-1,-1"; break;
			default:
				//all the rest weren't in my documentation
				throw new SensicamException("Unsupported LONG exposure sub mode. Please add config here.");
		}
		
		int ret = Sensicam.sen_set_coc(hDriver, exposureMode, trig, 
										roiX0b32, roiX1b32, roiY0b32, roiY1b32, 
										cfg.binX, cfg.binY, timeValues);
		
		if(ret < 0)
			throw new SensicamException("sen_set_coc", ret);			    
		log(false, ".setupCOC(): sen_set_coc() = " + ret);
	}
	
	/** Queries the CCD/actual image geometry from the driver and allocates the
	 *  driver-side (kernel) image buffers, recording their buffer numbers in
	 *  kernelBufferNo.
	 * @throws SensicamException if the size query or any allocation fails
	 */
	private void initBuffers(){

		/* allocate and map the device-buffers with ccdsize */
		int sizes[] = Sensicam.sen_getsizes(hDriver);
		int result = sizes[0];
		ccdWidth  = sizes[1];
		ccdHeight = sizes[2];
		actWidth  = sizes[3];
		actHeight = sizes[4];
		bitDepth  = sizes[5];

		if(result < 0)
			throw new SensicamException("sen_getsizes", result);
		
		// Each kernel buffer holds the full CCD at 16 bits per pixel.
		int wantedBytes = ccdWidth * ccdHeight * 2;
		for(int buf = 0; buf < cfg.nKernelBuffers; buf++){
			kernelBufferNo[buf] = -1;
			
			int allocRet[] = Sensicam.sen_allocate_buffer(hDriver, kernelBufferNo[buf], wantedBytes);
			// The driver assigns the buffer number; record it before checking for errors.
			kernelBufferNo[buf] = allocRet[1];
			
			if(allocRet[0] < 0)
				throw new SensicamException("sen_allocate_buffer", allocRet[0]);
		}
	}
	
	/** (Re)allocates or revalidates the java-side image array for this series.
	 * 
	 *  Delegates to ByteBufferImage.checkBulkAllocation(), which returns the
	 *  existing array when it is already compatible with the requested
	 *  geometry; otherwise a fresh array is installed on the source.
	 * 
	 * @throws RuntimeException (with the OutOfMemoryError as cause) if the
	 *         images cannot be allocated
	 */
	private void initImages(){
		// Bytes per copied image: 12-bit data is stored as 2 bytes per pixel.
		imgDataSize = actWidth * actHeight * (bitDepth == 12 ? 2 : 1);
				
		
		try{
			ByteBufferImage newImgs[] = ByteBufferImage.checkBulkAllocation(activeSource, images, actWidth, actHeight, bitDepth, cfg.nImagesToCapture, ByteOrder.LITTLE_ENDIAN);			
			if(newImgs != images){
				log(false, ".initImages() Cleared and reallocated " + cfg.nImagesToCapture + " images");
				images = newImgs;
				activeSource.newImageArray(images);
			}else{
				log(false, ".initImages() Reusing existing " + cfg.nImagesToCapture + " images");
				// Old contents are stale for this series, so mark every image invalid.
				if(newImgs != null){
					for(int j=0; j < newImgs.length; j++){
						if(newImgs[j] != null){
							newImgs[j].invalidate();
						}
					}
				}
			}

		}catch(OutOfMemoryError err){
			log(false, ".initImages() Not enough memory for images.");
			// FIX: chain the original error so the real allocation failure isn't lost.
			throw new RuntimeException("Not enough memory for image capture", err);
		}

	}
	
	/** Main capture loop: starts the camera, then repeatedly drains filled
	 *  kernel buffers into the java images until the series completes (in
	 *  non-continuous mode) or death is requested.
	 * 
	 *  Also services mid-series config changes requested via
	 *  inCaptureModifyConfig(): the camera is stopped, the new COC loaded and
	 *  the buffers re-queued, after which the loop continues.
	 * 
	 * @throws InterruptedException if interrupted while waiting for a buffer
	 *         without death having been requested
	 */
	private void doCapture() throws InterruptedException{
		
		SenbufDev.DEVBUF stat = new SenbufDev.DEVBUF();
		SencamDef.cam_values values = new SencamDef.cam_values();
		
		startAndEnableBuffers(values);
		int nextBuff = 0;
		
		seqStartTimeNS = System.nanoTime();
		seqStartTimeMS = System.currentTimeMillis(); //as close as possible to nanoT0, since everything is then relative to this
		lastSelectTimeMS = System.currentTimeMillis();
		lastSelectTimeNS = seqStartTimeNS;
		
		//hang config and timing info to the first image only
		SencamDef.cam_param param = new SencamDef.cam_param();
		Sensicam.sen_get_cam_param(hDriver, param);
		activeSource.addMetaDataMap(param.toMap("Sensicam/param"));
		activeSource.addMetaDataMap(cfg.toMap("Sensicam/config"));
		activeSource.addMetaDataMap(values.toMap("Sensicam/values"));
		activeSource.setSeriesMetaData("SequenceT0", seqStartTimeMS);
		activeSource.setSeriesMetaData("totalFrameTimeMS", values.totalFrameTimeMS());
				
		nextImgIdx = 0;
		//for each image in capture sequence
		do{
			
			exposureTimeAtSelect = cfg.exposureMS;

			if(modifyConfig){
				log(false, ".doCapture(): Modifying config... Stopping camera and clearing buffers");
				//we need to clear the buffers so that we don't get old images when we think we have the new config
				
				stopAndClearBuffers();
				
				log(false, ".doCapture().modifyCfg: Camera stopped");
				
				setupCOC();
				startAndEnableBuffers(values);
				// The new config has now been consumed, so clear the flag: any further
				// inCaptureModifyConfig() call triggers another reload.
				// (FIX: the flag was previously cleared twice in this branch.)
				modifyConfig = false;
				checkSizesNotModified();
				
				log(false, ".doCapture().modifyCfg: Done, continuing.");	
			}

			//search for filled buffers, starting from the last one
			int buffIdx = nextBuff;
			do{
				//dumpBufferStats();
				
				if(buffIdx >= kernelBufferNo.length)
					buffIdx=0;
				
				Sensicam.sen_get_buffer_status(hDriver, kernelBufferNo[buffIdx], 0, stat, 4);
				log(true, ".doCapture(): Buffer["+buffIdx+"]: buf = " + buffIdx + ", stat = " + stat);
				
				//if we reach the buffer that is waiting to be read, then we've done all the filled ones
				if(stat.SEN_BUF_STAT_WRITE_DONE() == 0){
					log(true, ".doCapture(): Reached next reading buffer at " + buffIdx);					
					break;
				}				
				
				log(true, ".doCapture(): Image complete in buffer "+buffIdx+" with status "+stat);					

				copyFilledBuffer(buffIdx, stat);
				
				//dumpBufferStats();
			
				buffIdx++;
				
				//if we've reached the one we started at again, we /may/ have overrun
				if(buffIdx == nextBuff){
					System.err.println("WARNING: Sensicam kernel buffers possible overrun.");
				}
				
				
			}while(true);
			
			nextBuff = buffIdx;
			
			if(death)break;
			
			//dumpBufferStats();
			
			waitForBufferFill(values.readtime);
			
		}while(nextImgIdx < cfg.nImagesToCapture); //only fires in non-continuous mode
		
		log(false, ".doCapture(): Sequence complete or death requested: " + nextImgIdx + " of " + cfg.nImagesToCapture + ", death = " + death);
		
		stopAndClearBuffers();
		
		log(false, ".doCapture(): END");		
	}
	
	/** Copies one filled kernel buffer into images[nextImgIdx], attaches the
	 *  per-image timing metadata, requeues the kernel buffer and advances
	 *  nextImgIdx (wrapping according to cfg.wrap).
	 * 
	 * @param buffIdx index into kernelBufferNo of the buffer to copy
	 * @param stat    status of that buffer as last read by the caller; re-read
	 *                after the copy and also cloned into the image metadata
	 */
	private void copyFilledBuffer(int buffIdx, SenbufDev.DEVBUF stat){
		int ret;
		
		//skip buffers the driver flagged as bad rather than storing garbage
		if(stat.SEN_BUF_STAT_ERROR() != 0){
			log(true, ".doCapture(): Buffer status has error flags set: " + stat.SEN_BUF_STAT_ERROR());
			return;
		}
		
		try {
			//bracket the JNI copy with the image's writer lock.
			//NOTE(review): if sen_copy_buffer threw, endWriting() would be skipped —
			//confirm whether ByteBufferImage needs a try/finally here
			images[nextImgIdx].startWriting();
			ret = Sensicam.sen_copy_buffer(hDriver, kernelBufferNo[buffIdx], images[nextImgIdx].getWritableBuffer(), imgDataSize, 0);
			images[nextImgIdx].endWriting();
			
			if(ret <0)
				throw new SensicamException("sen_copy_buffer", ret);
			
			log(true, ".doCapture(): Buffer copied to images[" + nextImgIdx + "]");
			
		} catch (InterruptedException e) {
			//interrupted while waiting for the writer lock: abandon this image
			//(the interrupt/death decision is handled by the caller's loop)
			log(false, "Interrupted while waiting to write to java image.");
			return;
		}
				
		long copyTimeNano = System.nanoTime();
				
		//timing metadata, all relative to the series start (see doCapture())
		activeSource.setImageMetaData(EXPOSURE_METADATA_NAME, nextImgIdx, exposureTimeAtSelect);
		activeSource.setImageMetaData("Sensicam/selectTimeNano", nextImgIdx, (lastSelectTimeNS - seqStartTimeNS));
		activeSource.setImageMetaData("Sensicam/copyTimeNano", nextImgIdx, (copyTimeNano - seqStartTimeNS));
		activeSource.setImageMetaData("Sensicam/selectTimeSecs", nextImgIdx, (double)lastSelectTimeMS / 1000.0);
		activeSource.setImageMetaData("Sensicam/devBuf", nextImgIdx, stat.clone()); //I've never used this, so maybe a bit of a waste of time?		
	
		//re-read the status and give the buffer back to the driver if it isn't queued
		Sensicam.sen_get_buffer_status(hDriver, kernelBufferNo[buffIdx], 0, stat, 4);
		if(stat.SEN_BUF_STAT_QUEUED() == 0)
		{
			log(true, ".doCapture(): Adding buffer "+buffIdx+" (stat "+stat+") back to list.");
			
			ret = Sensicam.sen_add_buffer_to_list(hDriver, kernelBufferNo[buffIdx], imgDataSize, 0, 0);
			if(ret < 0)
				throw new SensicamException("sen_add_buffer_to_list", ret);
		}
		log(true, ".doCapture(): Image["+nextImgIdx+"] done and copied, notifiying source.");
		
		activeSource.imageCaptured(nextImgIdx);
	
		//advance the destination slot, wrapping per the configured wrap mode
		nextImgIdx++;
		if(nextImgIdx >= cfg.nImagesToCapture){
			if(cfg.wrap == SensicamConfig.WRAP_ALL)
				nextImgIdx = 0;
			else if(cfg.wrap == SensicamConfig.WRAP_NOTFIRST && cfg.nImagesToCapture > 1)
				nextImgIdx = 1;
		}
				
		
	}
	
	/** Blocks until the device signals activity (or death/timeout).
	 * 
	 *  Uses the native select() in short (max 100ms) slices so a java-side
	 *  interrupt from stopCapture() is noticed promptly, even though select()
	 *  itself cannot be interrupted from java.
	 * 
	 * @param readTime camera readout time in ms (from cam_values); added to
	 *                 the configured capture timeout
	 * @throws InterruptedException if interrupted without death being requested
	 */
	private void waitForBufferFill(int readTime) throws InterruptedException {
		double maxSelectWait = 0.100; //100ms is a reasonable user response time
		double timeout = readTime / 1000.0 + cfg.captureTimeout;		
		log(true, ".doCapture(): capture timeout = " + timeout + "s, waiting... ");

		int ret;
		
		long selectT0 = System.nanoTime();
		do{
			//use unix select() call to wait nicely for something to happen from device, or for timeout
			//but it can't be interrupted by a java thread interrupt so we have a maximum time 
			//that we will actually sit in select() before checking if we've been interrupted
			ret = Sensicam.select_with_timeout(hDriver, Math.min(timeout, maxSelectWait));
			
			lastSelectTimeNS = System.nanoTime();
			lastSelectTimeMS = System.currentTimeMillis(); //this is the closest thing we can get to a real time
			
			//Thread.interrupted() also clears the interrupt flag, which is what
			//we want: death is the authoritative "please stop" signal here
			if(Thread.interrupted()){
				if(death){
					System.err.println("Aborted during JNI select");
					break;
				}else
					throw new InterruptedException("Interrupted during JNI select for no apparant reason.");
			}
			
			//only an error once the FULL timeout has elapsed across the short slices
			if(ret == 0 && (lastSelectTimeNS - selectT0)*1e-9 > timeout){
				throw new SensicamException("Select timed out. If using HW-trigger mode, check if trigger-signal is connected");					
			}
			
			if(ret < 0)
				throw new SensicamException("Error in select(): " + ret);
		}while(ret == 0); // keep looping if it just timed out on the short timeout
		
	}
	
	/** Starts the camera (continuous COC), queues every kernel buffer and
	 *  enables its event, then reads back the camera timing values.
	 * 
	 * @param values filled in with the camera's coc/bel/read times
	 * @throws SensicamException on any JNI call failure
	 */
	private void startAndEnableBuffers(SencamDef.cam_values values) {
		// Load the command into the camera
		int ret = Sensicam.sen_run_coc(hDriver, SencamDef.CAM_CONT);
		if(ret < 0)
			throw new SensicamException("sen_run_coc", ret);
		
		// Mark all the buffers as available for use
		for(int i=0;i<cfg.nKernelBuffers;i++)
		{
			ret = Sensicam.sen_add_buffer_to_list(hDriver, kernelBufferNo[i], imgDataSize, 0, 0);
			if(ret < 0)
				throw new SensicamException("sen_add_buffer_to_list", ret);

			ret = Sensicam.sen_set_buffer_event(hDriver, kernelBufferNo[i], 1);
			if(ret < 0)
				throw new SensicamException("sen_set_buffer_event", ret);
		}
		
		Sensicam.sen_get_cam_values(hDriver, values);
		// FIX: log label previously claimed ".doCapture()" — misleading when called during a reconfigure.
		log(false, ".startAndEnableBuffers(): Times: cocTime = " + values.coctime + ", belTime = " + values.beltime + ", readTime = " + values.readtime);

	}
	
	/** Debug helper: logs (at spin level) the driver status of every kernel buffer. */
	private void dumpBufferStats() {
		SenbufDev.DEVBUF bufStat = new SenbufDev.DEVBUF();
		log(true, "Buff status:");
		for(int buf = 0; buf < cfg.nKernelBuffers; buf++){
			Sensicam.sen_get_buffer_status(hDriver, kernelBufferNo[buf], 0, bufStat, 4);
			log(true, buf + ": " + bufStat);
		}
	}
		
	/** Stops the camera and removes/disables every kernel buffer, so no stale
	 *  image can be delivered after a mid-series reconfigure or at series end.
	 * @throws SensicamException on any JNI call failure
	 */
	private void stopAndClearBuffers() {
		SenbufDev.DEVBUF stat = new SenbufDev.DEVBUF();
		
		int ret = Sensicam.sen_stop_coc(hDriver, 0);
		if(ret < 0)
			// FIX: message previously said "sen_stop_camera", but the failing call is sen_stop_coc.
			throw new SensicamException("sen_stop_coc", ret);		

		//remove device buffers from working list
		
		// FIX: log labels previously claimed ".doCapture()".
		log(false, ".stopAndClearBuffers(): Camera stopped");
		
		for(int i=0; i < cfg.nKernelBuffers; i++)
		{
			Sensicam.sen_get_buffer_status(hDriver, kernelBufferNo[i], 0, stat, 4);
			//the original C checked SEN_BUF_STAT_QUEUED(&stat) here; we remove unconditionally
			{
				log(false, ".stopAndClearBuffers(): Removing buffer "+i+" stat "+stat + " from list. ");
				
				ret = Sensicam.sen_remove_buffer_from_list(hDriver, kernelBufferNo[i]);
				if(ret < 0)
					throw new SensicamException("sen_remove_buffer_from_list", ret);
			}
			//disable select for device buffers
			ret = Sensicam.sen_set_buffer_event(hDriver, kernelBufferNo[i], 0);

			if(ret < 0)
				throw new SensicamException("sen_set_buffer_event", ret);
			
			// NOTE(review): result of this final status query is discarded — looks like
			// a debugging leftover; kept in case the driver call has side effects.
			Sensicam.sen_get_buffer_status(hDriver, kernelBufferNo[i], 0, stat, 4);			
		}
		
	}
	
	/** Verifies a mid-capture reconfigure did not change the image geometry,
	 *  since the java-side images cannot be reallocated mid-series.
	 * @throws SensicamException if the size query fails, or if any dimension
	 *         or the bit depth has changed
	 */
	private void checkSizesNotModified() {
		int retI[] = Sensicam.sen_getsizes(hDriver);
		int ret = retI[0];
		// FIX: the return code was previously ignored — a failed query must not
		// be allowed to pass the size check with garbage values.
		if(ret < 0)
			throw new SensicamException("sen_getsizes", ret);
		if(ccdWidth != retI[1] || ccdHeight != retI[2] ||
				actWidth != retI[3] || actHeight != retI[4] ||
				bitDepth != retI[5]){
			throw new SensicamException("Sizes changed in inCaptureModify: " +
					"(ccd="+ccdWidth+"x"+ccdHeight+" , act="+actWidth+"x"+actHeight+", bpp="+bitDepth+")" +
					" --> " +
					"("+retI[1]+"x"+retI[2]+" , "+retI[3]+"x"+retI[4]+", bpp="+retI[5]+")");
		}

	}


	/** Releases all kernel buffers and closes the board.
	 * 
	 *  Called from run()'s finally block, so it must cope with being invoked
	 *  before initCamera() got as far as allocating anything.
	 */
	private void deinit(){
		log(false, ".deInit()");
		
		//free buffers
		// FIX: guard against nulls — if run() bailed out before initCamera()
		// allocated kernelBufferNo, the old code threw an NPE from the finally
		// block, masking the original failure.
		if(cfg != null && kernelBufferNo != null){
			for(int i=0; i < cfg.nKernelBuffers; i++){
				if(kernelBufferNo[i] >= 0){
					int ret = Sensicam.sen_free_buffer(hDriver, kernelBufferNo[i]);
					if(ret < 0)
						log(false, "deinit(): WARNING: sen_free_buffer returned error: " + ret);
				}
			}
		}

		if(hDriver != 0){
			int ret = Sensicam.sen_closeboard(hDriver);
			log(false, "deinit(): closeboard() = " + ret);
			hDriver = 0;
		}
	}
	
	/** Writes a line to stderr; spin-level messages are suppressed unless
	 *  spinDebug is enabled.
	 * @param inSpin true for the very chatty messages emitted inside polling loops
	 * @param str    message text, appended to the class-name prefix
	 */
	private void log(boolean inSpin, String str){
		if(inSpin && !spinDebug)
			return;
		String prefix = inSpin ? "SensicamCapture[spin]" : "SensicamCapture";
		System.err.println(prefix + "." + str);
	}
		
	/** Starts a new capture series on a fresh MAX_PRIORITY thread.
	 * 
	 * @param source source to receive images, metadata and status updates
	 * @param cfg    capture configuration; cloned, so later caller-side changes
	 *               have no effect (use inCaptureModifyConfig() instead)
	 * @param images existing image array to reuse if compatible, or null
	 * @throws SensicamException if a capture is already active
	 */
	public void startCapture(SensicamSource source, SensicamConfig cfg, ByteBufferImage images[]) {
		if(isCapturing())
			throw new SensicamException("Capture already active");
		
		this.cfg = cfg.clone();
		this.images = images;
		this.nextImgIdx = 0;
		this.activeSource = source;
		setStatus("Thread starting");
		this.cameraType = -1;
		this.death = false;	// must be cleared before start() or the new thread could exit at once
		
		thread = new Thread(this);
		thread.setPriority(Thread.MAX_PRIORITY);
		System.out.println("Starting sensicam capture thread with priority = " + thread.getPriority());
		thread.start();
	}
	
	/** Requests the capture thread to stop (sets the death flag and interrupts it).
	 * @param awaitDeath when true, block until the thread has actually exited
	 */
	public void stopCapture(boolean awaitDeath){
		if(thread != null && thread.isAlive()){
			death = true;
			thread.interrupt();
			if(awaitDeath){
				System.out.println("SensicamCapture: Waiting for thread to die.");
				boolean interrupted = false;
				while(thread.isAlive()){
					try {
						// FIX: join rather than sleep-polling, and remember (instead of
						// swallowing) any interrupt so the status can be restored below.
						thread.join(100);
					} catch (InterruptedException e) {
						interrupted = true;
					}
				}
				if(interrupted)
					Thread.currentThread().interrupt(); // restore interrupt status for the caller
				System.out.println("SensicamCapture: Thread died.");					
			}	
		}
	}
	
	/** Stops any active capture without waiting for the thread to exit. */
	public void destroy() { stopCapture(false);	}

	/** @return the current human-readable capture status string. */
	public String getStatus(){ return status; }
	
	/** @return true while the capture thread is running and not being asked to stop. */
	public boolean isCapturing(){ return thread != null && thread.isAlive() && !death; }

	/** @return a one-line summary of the detected camera: type name and CCD geometry. */
	public String getCCDInfo() {
		String typeName = CamTypes.camTypeToString(cameraType);
		return typeName + ": " + ccdWidth + " x " + ccdHeight + " x " + bitDepth;
	}
	
	/** Updates the status string and notifies the active source, if there is one. */
	private void setStatus(String status){
		this.status = status;
		SensicamSource src = activeSource;
		if(src != null)
			src.statusChanged();
	}
	
	/** Call to load a new configuration into the camera during a run sequence.
	 *   
	 *  Only things which don't change the image resolution can be modified:
		exposureType, exposureSubMode, gainMode, 
		roiX0, roiY0, 
		delayMS, exposureMS, delayNS, exposureNS, 
		shutterStartMS, shutterEndMS
		captureTimeout
	 * 
	 * @param cfg the new configuration; becomes the active config once the
	 *            capture thread notices the modify flag
	 * @throws SensicamException if no capture is running, or a parameter that
	 *            cannot be changed mid-capture differs from the active config
	 */
	public void inCaptureModifyConfig(SensicamConfig cfg){
		if(!isCapturing())
			throw new SensicamException("SensicamCapture.modifyConfig() called but capture is not running.");
		
		// FIX: logFile may legitimately be null (logging disabled, see initCamera());
		// the previous direct equalsIgnoreCase() call NPE'd in that case.
		boolean sameLogFile = (cfg.logFile == null)
				? (this.cfg.logFile == null)
				: (this.cfg.logFile != null && cfg.logFile.equalsIgnoreCase(this.cfg.logFile));
		
		if(
				cfg.boardNo != this.cfg.boardNo ||
				!sameLogFile ||
				cfg.enableSyslog != this.cfg.enableSyslog ||
				cfg.nImagesToCapture != this.cfg.nImagesToCapture ||
				cfg.wrap != this.cfg.wrap ||
				cfg.nKernelBuffers != this.cfg.nKernelBuffers){
			throw new SensicamException("SensicamCapture.modifyConfig(): Modification of parameters not allowed in mid-capture modification");
		}
				
		if(cfg.roiX0 != this.cfg.roiX0 ||
			cfg.roiY0 != this.cfg.roiY0){
			
			System.err.println("WARNING: SensicamCapture.modifyConfig() called for a different ROI position. This works, but the info isn't stored in the meta data");
		}
		
		
		//signal the modify; picked up at the top of doCapture()'s loop
		this.cfg = cfg;
		modifyConfig = true;
		
	}
}
