Source code for allensdk.internal.pipeline_modules.run_dff_computation

import os
import argparse
import h5py
import logging
from allensdk.brain_observatory.dff import calculate_dff
import allensdk.core.json_utilities as ju


def parse_input(data):
    input_file = data.get("input_file", None)
    if input_file is None:
        raise IOError("input JSON missing required field 'input_file'")
    if not os.path.exists(input_file):
        raise IOError("input file does not exist: %s" % input_file)

    output_file = data.get("output_file", None)
    if output_file is None:
        raise IOError("input JSON missing required field 'output_file'")

    return input_file, output_file
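# A hedged usage sketch for parse_input (the file names below are
# hypothetical; the input file must already exist on disk for the
# existence check to pass):
#
#   >>> parse_input({"input_file": "traces.h5", "output_file": "dff.h5"})
#   ('traces.h5', 'dff.h5')
#
# A missing field or a nonexistent input file raises IOError.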
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("input_json")
    parser.add_argument("output_json")
    parser.add_argument("--log_level", default=logging.DEBUG)
    parser.add_argument("--input_dataset", default="FC")
    parser.add_argument("--roi_field", default="roi_names")
    parser.add_argument("--output_dataset", default="data")
    args = parser.parse_args()

    logging.getLogger().setLevel(args.log_level)

    input_data = ju.read(args.input_json)
    input_file, output_file = parse_input(input_data)

    # read traces and ROI names from the input datasets
    input_h5 = h5py.File(input_file, "r")
    traces = input_h5[args.input_dataset][:]
    roi_names = input_h5[args.roi_field][:]
    input_h5.close()

    dff = calculate_dff(traces)

    # write dF/F traces and ROI names to the output datasets
    output_h5 = h5py.File(output_file, "w")
    output_h5[args.output_dataset] = dff
    output_h5[args.roi_field] = roi_names
    output_h5.close()

    output_data = {}
    ju.write(args.output_json, output_data)
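# A minimal sketch of building an input HDF5 file that main() can
# consume, assuming the default dataset names ("FC" for traces,
# "roi_names" for ROI identifiers). The helper name, file name, and
# synthetic values are illustrative only.
import numpy as np

def write_example_input(path="example_traces.h5"):
    with h5py.File(path, "w") as f:
        f["FC"] = np.random.rand(5, 1000)  # 5 ROIs x 1000 timepoints
        f["roi_names"] = np.array([b"roi_%d" % i for i in range(5)])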
if __name__ == "__main__":
    main()
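# A hedged sketch of invoking this module from the command line (file
# names are hypothetical). input.json must carry the "input_file" and
# "output_file" fields validated by parse_input above:
#
#   python -m allensdk.internal.pipeline_modules.run_dff_computation \
#       input.json output.json --input_dataset FC --output_dataset data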